diff --git a/.circleci/config.yml b/.circleci/config.yml index 92ba22e97a74..016711ba2392 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -70,6 +70,11 @@ setup_env: &setup_env name: "Setup environment" command: ./build-system/scripts/setup_env "$CIRCLE_SHA1" "$CIRCLE_TAG" "$CIRCLE_JOB" "$CIRCLE_REPOSITORY_URL" "$CIRCLE_BRANCH" "$CIRCLE_PULL_REQUEST" +defaults_e2e_test: &defaults_e2e_test + docker: + - image: aztecprotocol/alpine-build-image + resource_class: small + jobs: # Dynamically filter our code, quickly figuring out which jobs we can skip. generate-config: @@ -589,9 +594,6 @@ jobs: aztec_manifest_key: end-to-end e2e-2-pxes: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -599,11 +601,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_2_pxes.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-note-getter: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -611,11 +611,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_note_getter.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-counter: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -623,11 +621,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_counter_contract.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-private-voting: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -635,11 +631,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_private_voting_contract.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-multiple-accounts-1-enc-key: - docker: - - 
image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -647,11 +641,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_multiple_accounts_1_enc_key.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-deploy-contract: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -659,11 +651,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_deploy_contract.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-lending-contract: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -671,11 +661,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_lending_contract.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-token-contract: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -683,11 +671,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_token_contract.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-authwit-test: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -695,11 +681,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_authwit.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-blacklist-token-contract: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -707,6 +691,7 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_blacklist_token_contract.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test # TODO(3458): 
Investigate intermittent failure # e2e-slow-tree: @@ -722,9 +707,6 @@ jobs: # aztec_manifest_key: end-to-end e2e-sandbox-example: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -732,11 +714,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_sandbox_example.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-state-vars: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -744,11 +724,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_state_vars.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-block-building: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -756,11 +734,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_block_building.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-nested-contract: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -768,11 +744,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_nested_contract.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-static-calls: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -780,11 +754,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_static_calls.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-delegate-calls: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -792,11 +764,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml 
TEST=e2e_delegate_calls.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-non-contract-account: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -804,11 +774,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_non_contract_account.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-cross-chain-messaging: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -816,11 +784,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_cross_chain_messaging.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-public-cross-chain-messaging: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -828,11 +794,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_public_cross_chain_messaging.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-public-to-private-messaging: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -840,11 +804,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_public_to_private_messaging.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-account-contracts: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -852,11 +814,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_account_contracts.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-escrow-contract: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -864,11 +824,9 @@ jobs: name: "Test" 
command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_escrow_contract.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-inclusion-proofs-contract: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -876,11 +834,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_inclusion_proofs_contract.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-pending-note-hashes-contract: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -888,11 +844,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_pending_note_hashes_contract.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-ordering: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -900,11 +854,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_ordering.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test uniswap-trade-on-l1-from-l2: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -912,11 +864,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=uniswap_trade_on_l1_from_l2.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test integration-archiver-l1-to-l2: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -924,11 +874,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=integration_archiver_l1_to_l2.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test integration-l1-publisher: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small 
steps: - *checkout - *setup_env @@ -936,11 +884,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=integration_l1_publisher.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-cli: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -948,11 +894,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_cli.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-persistence: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -960,11 +904,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose-no-sandbox.yml TEST=e2e_persistence.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-browser: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -972,11 +914,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_aztec_js_browser.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-card-game: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -984,11 +924,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_card_game.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-avm-simulator: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -996,11 +934,9 @@ jobs: name: "Test" command: AVM_ENABLED=1 cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_avm_simulator.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-fees: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ 
-1008,11 +944,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_fees.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-dapp-subscription: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -1020,11 +954,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=e2e_dapp_subscription.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test pxe: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -1032,11 +964,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=pxe_sandbox.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test cli-docs-sandbox: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -1044,11 +974,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=cli_docs_sandbox.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test e2e-docs-examples: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -1056,11 +984,9 @@ jobs: name: "Test" command: AVM_ENABLED=1 cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=docs_examples_test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test guides-writing-an-account-contract: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -1068,11 +994,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=guides/writing_an_account_contract.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test guides-dapp-testing: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env 
@@ -1080,11 +1004,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=guides/dapp_testing.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test guides-sample-dapp: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -1092,11 +1014,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=sample-dapp aztec_manifest_key: end-to-end + <<: *defaults_e2e_test guides-up-quick-start: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -1104,11 +1024,9 @@ jobs: name: "Test" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose.yml TEST=guides/up_quick_start.test.ts aztec_manifest_key: end-to-end + <<: *defaults_e2e_test bench-publish-rollup: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -1116,11 +1034,9 @@ jobs: name: "Benchmark" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose-no-sandbox.yml TEST=benchmarks/bench_publish_rollup.test.ts DEBUG=aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees aztec_manifest_key: end-to-end + <<: *defaults_e2e_test bench-process-history: - docker: - - image: aztecprotocol/alpine-build-image - resource_class: small steps: - *checkout - *setup_env @@ -1128,6 +1044,7 @@ jobs: name: "Benchmark" command: cond_spot_run_compose end-to-end 4 ./scripts/docker-compose-no-sandbox.yml TEST=benchmarks/bench_process_history.test.ts DEBUG=aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees aztec_manifest_key: end-to-end + <<: *defaults_e2e_test build-docs: machine: diff --git a/build_manifest.yml b/build_manifest.yml index c2a3678dca4b..7e64c365cfbf 100644 --- a/build_manifest.yml +++ b/build_manifest.yml @@ -10,7 +10,7 @@ # dependencies: An array of other 
projects that this project depends on. # runDependencies: Additional projects that are needed to run a container/compose file. Ensures they're pulled first. -# Builds noir for x86_64 and arm64, creating a runnable container just with nargo. +# Builds noir for x86_64 and arm64, creating a runnable container just with nargo + acvm. noir: buildDir: noir dockerfile: Dockerfile.native @@ -169,6 +169,7 @@ yarn-project: - noir-packages - l1-contracts - noir-projects + - noir multiarch: host # A runnable container, sets entrypoint to be the aztec infrastructure entrypoint. @@ -215,6 +216,7 @@ end-to-end: - noir-packages - l1-contracts - noir-projects + - noir runDependencies: - aztec diff --git a/noir/.rebuild_patterns_native b/noir/.rebuild_patterns_native index dea963264023..c1b24da403b8 100644 --- a/noir/.rebuild_patterns_native +++ b/noir/.rebuild_patterns_native @@ -12,3 +12,4 @@ ^noir/noir-repo/tooling/nargo_toml ^noir/noir-repo/tooling/nargo_fmt ^noir/noir-repo/tooling/noirc_abi +^noir/noir-repo/tooling/acvm_cli diff --git a/noir/Dockerfile.native b/noir/Dockerfile.native index cd0122646bd2..73a29b3de21e 100644 --- a/noir/Dockerfile.native +++ b/noir/Dockerfile.native @@ -12,4 +12,5 @@ FROM ubuntu:focal # Install git as nargo needs it to clone. 
RUN apt-get update && apt-get install -y git tini && rm -rf /var/lib/apt/lists/* && apt-get clean COPY --from=0 /usr/src/noir/noir-repo/target/release/nargo /usr/src/noir/noir-repo/target/release/nargo +COPY --from=0 /usr/src/noir/noir-repo/target/release/acvm /usr/src/noir/noir-repo/target/release/acvm ENTRYPOINT ["/usr/bin/tini", "--", "/usr/src/noir/noir-repo/target/release/nargo"] diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index b2b6f8037bbe..83ac37442748 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -66,6 +66,26 @@ dependencies = [ "thiserror", ] +[[package]] +name = "acvm_cli" +version = "0.40.0" +dependencies = [ + "acir", + "acvm", + "bn254_blackbox_solver", + "clap", + "color-eyre", + "const_format", + "nargo", + "paste", + "proptest", + "rand 0.8.5", + "thiserror", + "toml 0.7.6", + "tracing-appender", + "tracing-subscriber", +] + [[package]] name = "acvm_js" version = "0.40.0" diff --git a/noir/noir-repo/Cargo.toml b/noir/noir-repo/Cargo.toml index 7d5da7b00d0f..572042f1a6a2 100644 --- a/noir/noir-repo/Cargo.toml +++ b/noir/noir-repo/Cargo.toml @@ -26,6 +26,7 @@ members = [ "tooling/nargo_toml", "tooling/noirc_abi", "tooling/noirc_abi_wasm", + "tooling/acvm_cli", # ACVM "acvm-repo/acir_field", "acvm-repo/acir", @@ -36,7 +37,7 @@ members = [ "acvm-repo/blackbox_solver", "acvm-repo/bn254_blackbox_solver", ] -default-members = ["tooling/nargo_cli"] +default-members = ["tooling/nargo_cli", "tooling/acvm_cli"] resolver = "2" [workspace.package] @@ -78,6 +79,7 @@ noir_lsp = { path = "tooling/lsp" } noir_debugger = { path = "tooling/debugger" } noirc_abi = { path = "tooling/noirc_abi" } bb_abstraction_leaks = { path = "tooling/bb_abstraction_leaks" } +acvm_cli = { path = "tooling/acvm_cli" } # LSP async-lsp = { version = "0.1.0", default-features = false } diff --git a/noir/noir-repo/docs/scripts/codegen_nargo_reference.sh b/noir/noir-repo/docs/scripts/codegen_nargo_reference.sh index 
4ff7d43d1428..6a9fda9420b6 100755 --- a/noir/noir-repo/docs/scripts/codegen_nargo_reference.sh +++ b/noir/noir-repo/docs/scripts/codegen_nargo_reference.sh @@ -30,4 +30,4 @@ sidebar_position: 0 --- " > $NARGO_REFERENCE -cargo run -F codegen-docs -- info >> $NARGO_REFERENCE +cargo run --bin nargo -F codegen-docs -- info >> $NARGO_REFERENCE diff --git a/noir/noir-repo/tooling/acvm_cli/Cargo.toml b/noir/noir-repo/tooling/acvm_cli/Cargo.toml new file mode 100644 index 000000000000..72424405d367 --- /dev/null +++ b/noir/noir-repo/tooling/acvm_cli/Cargo.toml @@ -0,0 +1,38 @@ +[package] +name = "acvm_cli" +description = "The entrypoint for executing the ACVM" +# x-release-please-start-version +version = "0.40.0" +# x-release-please-end +authors.workspace = true +edition.workspace = true +license.workspace = true +rust-version.workspace = true +repository.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +# Rename binary from `acvm_cli` to `acvm` +[[bin]] +name = "acvm" +path = "src/main.rs" + +[dependencies] +thiserror.workspace = true +toml.workspace = true +color-eyre = "0.6.2" +clap.workspace = true +acvm.workspace = true +nargo.workspace = true +const_format.workspace = true +bn254_blackbox_solver.workspace = true +acir.workspace = true + +# Logs +tracing-subscriber.workspace = true +tracing-appender = "0.2.3" + +[dev-dependencies] +rand = "0.8.5" +proptest = "1.2.0" +paste = "1.0.14" diff --git a/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs new file mode 100644 index 000000000000..f6337c2eb35d --- /dev/null +++ b/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs @@ -0,0 +1,79 @@ +use std::io::{self, Write}; + +use acir::circuit::Circuit; +use acir::native_types::WitnessMap; +use bn254_blackbox_solver::Bn254BlackBoxSolver; +use clap::Args; + +use crate::cli::fs::inputs::{read_bytecode_from_file, read_inputs_from_file}; +use 
crate::cli::fs::witness::save_witness_to_dir; +use crate::errors::CliError; +use nargo::ops::{execute_circuit, DefaultForeignCallExecutor}; + +use super::fs::witness::create_output_witness_string; + +/// Executes a circuit to calculate its return value +#[derive(Debug, Clone, Args)] +pub(crate) struct ExecuteCommand { + /// Write the execution witness to named file + #[clap(long, short)] + output_witness: Option<String>, + + /// The name of the toml file which contains the input witness map + #[clap(long, short)] + input_witness: String, + + /// The name of the binary file containing circuit bytecode + #[clap(long, short)] + bytecode: String, + + /// The working directory + #[clap(long, short)] + working_directory: String, + + /// Set to print output witness to stdout + #[clap(long, short, action)] + print: bool, +} + +fn run_command(args: ExecuteCommand) -> Result<String, CliError> { + let bytecode = read_bytecode_from_file(&args.working_directory, &args.bytecode)?; + let circuit_inputs = read_inputs_from_file(&args.working_directory, &args.input_witness)?; + let output_witness = execute_program_from_witness(&circuit_inputs, &bytecode, None)?; + let output_witness_string = create_output_witness_string(&output_witness)?; + if args.output_witness.is_some() { + save_witness_to_dir( + &output_witness_string, + &args.working_directory, + &args.output_witness.unwrap(), + )?; + } + Ok(output_witness_string) +} + +pub(crate) fn run(args: ExecuteCommand) -> Result<String, CliError> { + let print = args.print; + let output_witness_string = run_command(args)?; + if print { + io::stdout().write_all(output_witness_string.as_bytes()).unwrap(); + } + Ok(output_witness_string) +} + +pub(crate) fn execute_program_from_witness( + inputs_map: &WitnessMap, + bytecode: &Vec<u8>, + foreign_call_resolver_url: Option<&str>, +) -> Result<WitnessMap, CliError> { + let blackbox_solver = Bn254BlackBoxSolver::new(); + let circuit: Circuit = Circuit::deserialize_circuit(&bytecode) + .map_err(|_| CliError::CircuitDeserializationError())?; + let result = 
execute_circuit( + &circuit, + inputs_map.clone(), + &blackbox_solver, + &mut DefaultForeignCallExecutor::new(true, foreign_call_resolver_url), + ) + .map_err(|e| CliError::CircuitExecutionError(e)); + result +} diff --git a/noir/noir-repo/tooling/acvm_cli/src/cli/fs/inputs.rs b/noir/noir-repo/tooling/acvm_cli/src/cli/fs/inputs.rs new file mode 100644 index 000000000000..2a46cfba8849 --- /dev/null +++ b/noir/noir-repo/tooling/acvm_cli/src/cli/fs/inputs.rs @@ -0,0 +1,54 @@ +use acir::{ + native_types::{Witness, WitnessMap}, + FieldElement, +}; +use toml::Table; + +use crate::errors::{CliError, FilesystemError}; +use std::{fs::read, path::Path}; + +/// Returns the circuit's parameters parsed from a toml file at the given location +pub(crate) fn read_inputs_from_file<P: AsRef<Path>>( + working_directory: P, + file_name: &String, +) -> Result<WitnessMap, CliError> { + let file_path = working_directory.as_ref().join(file_name); + if !file_path.exists() { + return Err(CliError::FilesystemError(FilesystemError::MissingTomlFile( + file_name.to_owned(), + file_path, + ))); + } + + let input_string = std::fs::read_to_string(file_path) + .map_err(|_| FilesystemError::InvalidTomlFile(file_name.clone()))?; + let input_map = input_string + .parse::<Table>() + .map_err(|_| FilesystemError::InvalidTomlFile(file_name.clone()))?; + let mut witnesses: WitnessMap = WitnessMap::new(); + for (key, value) in input_map.into_iter() { + let index = + Witness(key.trim().parse().map_err(|_| CliError::WitnessIndexError(key.clone()))?); + if !value.is_str() { + return Err(CliError::WitnessValueError(key.clone())); + } + let field = FieldElement::from_hex(value.as_str().unwrap()).unwrap(); + witnesses.insert(index, field); + } + + Ok(witnesses) +} + +/// Returns the circuit's bytecode read from the file at the given location +pub(crate) fn read_bytecode_from_file<P: AsRef<Path>>( + working_directory: P, + file_name: &String, +) -> Result<Vec<u8>, FilesystemError> { + let file_path = working_directory.as_ref().join(file_name); + if !file_path.exists() { + return 
Err(FilesystemError::MissingBytecodeFile(file_name.to_owned(), file_path)); + } + let bytecode: Vec<u8> = + read(file_path).map_err(|_| FilesystemError::InvalidBytecodeFile(file_name.clone()))?; + Ok(bytecode) +} diff --git a/noir/noir-repo/tooling/acvm_cli/src/cli/fs/mod.rs b/noir/noir-repo/tooling/acvm_cli/src/cli/fs/mod.rs new file mode 100644 index 000000000000..f23ba06fd8bc --- /dev/null +++ b/noir/noir-repo/tooling/acvm_cli/src/cli/fs/mod.rs @@ -0,0 +1,2 @@ +pub(super) mod inputs; +pub(super) mod witness; diff --git a/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs b/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs new file mode 100644 index 000000000000..2daaa5a3a584 --- /dev/null +++ b/noir/noir-repo/tooling/acvm_cli/src/cli/fs/witness.rs @@ -0,0 +1,36 @@ +use std::{ + collections::BTreeMap, + fs::File, + io::Write, + path::{Path, PathBuf}, +}; + +use acvm::acir::native_types::WitnessMap; + +use crate::errors::{CliError, FilesystemError}; + +/// Saves the provided output witnesses to a toml file created at the given location +pub(crate) fn save_witness_to_dir<P: AsRef<Path>>( + output_witness: &String, + witness_dir: P, + file_name: &String, +) -> Result<PathBuf, FilesystemError> { + let witness_path = witness_dir.as_ref().join(file_name); + + let mut file = File::create(&witness_path) + .map_err(|_| FilesystemError::OutputWitnessCreationFailed(file_name.clone()))?; + write!(file, "{}", output_witness) + .map_err(|_| FilesystemError::OutputWitnessWriteFailed(file_name.clone()))?; + + Ok(witness_path) +} + +/// Creates a toml representation of the provided witness map +pub(crate) fn create_output_witness_string(witnesses: &WitnessMap) -> Result<String, CliError> { + let mut witness_map: BTreeMap<String, String> = BTreeMap::new(); + for (key, value) in witnesses.clone().into_iter() { + witness_map.insert(key.0.to_string(), format!("0x{}", value.to_hex())); + } + + toml::to_string(&witness_map).map_err(|_| CliError::OutputWitnessSerializationFailed()) +} diff --git a/noir/noir-repo/tooling/acvm_cli/src/cli/mod.rs 
b/noir/noir-repo/tooling/acvm_cli/src/cli/mod.rs new file mode 100644 index 000000000000..a610b08ab77e --- /dev/null +++ b/noir/noir-repo/tooling/acvm_cli/src/cli/mod.rs @@ -0,0 +1,41 @@ +use clap::{Parser, Subcommand}; +use color_eyre::eyre; +use const_format::formatcp; + +mod execute_cmd; +mod fs; + +const ACVM_VERSION: &str = env!("CARGO_PKG_VERSION"); + +static VERSION_STRING: &str = formatcp!("version = {}\n", ACVM_VERSION,); + +#[derive(Parser, Debug)] +#[command(name="acvm", author, version=VERSION_STRING, about, long_about = None)] +struct ACVMCli { + #[command(subcommand)] + command: ACVMCommand, +} + +#[non_exhaustive] +#[derive(Subcommand, Clone, Debug)] +enum ACVMCommand { + Execute(execute_cmd::ExecuteCommand), +} + +#[cfg(not(feature = "codegen-docs"))] +pub(crate) fn start_cli() -> eyre::Result<()> { + let ACVMCli { command } = ACVMCli::parse(); + + match command { + ACVMCommand::Execute(args) => execute_cmd::run(args), + }?; + + Ok(()) +} + +#[cfg(feature = "codegen-docs")] +pub(crate) fn start_cli() -> eyre::Result<()> { + let markdown: String = clap_markdown::help_markdown::<ACVMCli>(); + println!("{markdown}"); + Ok(()) +} diff --git a/noir/noir-repo/tooling/acvm_cli/src/errors.rs b/noir/noir-repo/tooling/acvm_cli/src/errors.rs new file mode 100644 index 000000000000..035388d05f7e --- /dev/null +++ b/noir/noir-repo/tooling/acvm_cli/src/errors.rs @@ -0,0 +1,52 @@ +use nargo::NargoError; +use std::path::PathBuf; +use thiserror::Error; + +#[derive(Debug, Error)] +pub(crate) enum FilesystemError { + #[error( + " Error: cannot find {0} in expected location {1:?}.\n Please generate this file at the expected location." + )] + MissingTomlFile(String, PathBuf), + #[error(" Error: failed to parse toml file {0}.")] + InvalidTomlFile(String), + #[error( + " Error: cannot find {0} in expected location {1:?}.\n Please generate this file at the expected location." 
+ )] + MissingBytecodeFile(String, PathBuf), + + #[error(" Error: failed to read bytecode file {0}.")] + InvalidBytecodeFile(String), + + #[error(" Error: failed to create output witness file {0}.")] + OutputWitnessCreationFailed(String), + + #[error(" Error: failed to write output witness file {0}.")] + OutputWitnessWriteFailed(String), +} + +#[derive(Debug, Error)] +pub(crate) enum CliError { + /// Filesystem errors + #[error(transparent)] + FilesystemError(#[from] FilesystemError), + + /// Error related to circuit deserialization + #[error("Error: failed to deserialize circuit")] + CircuitDeserializationError(), + + /// Error related to circuit execution + #[error(transparent)] + CircuitExecutionError(#[from] NargoError), + + /// Input Witness Value Error + #[error("Error: failed to parse witness value {0}")] + WitnessValueError(String), + + /// Input Witness Index Error + #[error("Error: failed to parse witness index {0}")] + WitnessIndexError(String), + + #[error(" Error: failed to serialize output witness.")] + OutputWitnessSerializationFailed(), +} diff --git a/noir/noir-repo/tooling/acvm_cli/src/main.rs b/noir/noir-repo/tooling/acvm_cli/src/main.rs new file mode 100644 index 000000000000..33cadc73a7cf --- /dev/null +++ b/noir/noir-repo/tooling/acvm_cli/src/main.rs @@ -0,0 +1,36 @@ +#![forbid(unsafe_code)] +#![warn(unreachable_pub)] +#![warn(clippy::semicolon_if_nothing_returned)] +#![cfg_attr(not(test), warn(unused_crate_dependencies, unused_extern_crates))] + +mod cli; +mod errors; + +use std::env; + +use tracing_appender::rolling; +use tracing_subscriber::{fmt::format::FmtSpan, EnvFilter}; + +fn main() { + // Setup tracing + if let Ok(log_dir) = env::var("ACVM_LOG_DIR") { + let debug_file = rolling::daily(log_dir, "acvm-log"); + tracing_subscriber::fmt() + .with_span_events(FmtSpan::ACTIVE) + .with_writer(debug_file) + .with_ansi(false) + .with_env_filter(EnvFilter::from_default_env()) + .init(); + } else { + tracing_subscriber::fmt() + 
.with_span_events(FmtSpan::ACTIVE) + .with_ansi(true) + .with_env_filter(EnvFilter::from_env("NOIR_LOG")) + .init(); + } + + if let Err(report) = cli::start_cli() { + eprintln!("{report}"); + std::process::exit(1); + } +} diff --git a/yarn-project/Dockerfile b/yarn-project/Dockerfile index fcedd17ff189..e27b53536f25 100644 --- a/yarn-project/Dockerfile +++ b/yarn-project/Dockerfile @@ -2,6 +2,7 @@ FROM --platform=linux/amd64 aztecprotocol/bb.js as bb.js FROM --platform=linux/amd64 aztecprotocol/noir-packages as noir-packages FROM --platform=linux/amd64 aztecprotocol/l1-contracts as contracts FROM --platform=linux/amd64 aztecprotocol/noir-projects as noir-projects +FROM --platform=linux/amd64 aztecprotocol/noir as noir FROM node:18.19.0 as builder RUN apt update && apt install -y jq curl perl && rm -rf /var/lib/apt/lists/* && apt-get clean @@ -11,6 +12,8 @@ COPY --from=bb.js /usr/src/barretenberg/ts /usr/src/barretenberg/ts COPY --from=noir-packages /usr/src/noir/packages /usr/src/noir/packages COPY --from=contracts /usr/src/l1-contracts /usr/src/l1-contracts COPY --from=noir-projects /usr/src/noir-projects /usr/src/noir-projects +# We want the native ACVM binary +COPY --from=noir /usr/src/noir/noir-repo/target/release/acvm /usr/src/noir/noir-repo/target/release/acvm WORKDIR /usr/src/yarn-project COPY . . @@ -34,8 +37,8 @@ RUN yarn workspaces focus @aztec/cli @aztec/aztec --production && yarn cache cle # ARG COMMIT_TAG="" # RUN ./scripts/version_packages.sh -# We no longer need nargo etc. -RUN rm -rf /usr/src/noir/noir-repo /usr/src/noir-projects /usr/src/l1-contracts +# We no longer need these. +RUN rm -rf /usr/src/noir-projects /usr/src/l1-contracts # Create minimal size image. 
FROM node:18.19.1-slim diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 794ee11874be..3de03460588d 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -50,6 +50,7 @@ import { GlobalVariableBuilder, PublicProcessorFactory, SequencerClient, + WASMSimulator, getGlobalVariableBuilder, } from '@aztec/sequencer-client'; import { ContractClassPublic, ContractInstanceWithAddress } from '@aztec/types/contracts'; @@ -605,6 +606,7 @@ export class AztecNodeService implements AztecNode { merkleTrees.asLatest(), this.contractDataSource, this.l1ToL2MessageSource, + new WASMSimulator(), ); const processor = await publicProcessorFactory.create(prevHeader, newGlobalVariables); const [, failedTxs] = await processor.process([tx]); diff --git a/yarn-project/aztec/Dockerfile b/yarn-project/aztec/Dockerfile index ae996a59dfbd..a78851eab10f 100644 --- a/yarn-project/aztec/Dockerfile +++ b/yarn-project/aztec/Dockerfile @@ -1,4 +1,6 @@ FROM aztecprotocol/yarn-project AS yarn-project +# ENV vars for using native ACVM simulation +ENV ACVM_BINARY_PATH="/usr/src/noir/noir-repo/target/release/acvm" ACVM_WORKING_DIRECTORY="/tmp/acvm" ENTRYPOINT ["node", "--no-warnings", "/usr/src/yarn-project/aztec/dest/bin/index.js"] EXPOSE 8080 diff --git a/yarn-project/circuit-types/src/interfaces/configs.ts b/yarn-project/circuit-types/src/interfaces/configs.ts index 24f8d467f13b..19dd07ac4937 100644 --- a/yarn-project/circuit-types/src/interfaces/configs.ts +++ b/yarn-project/circuit-types/src/interfaces/configs.ts @@ -14,4 +14,8 @@ export interface SequencerConfig { coinbase?: EthAddress; /** Address to receive fees. 
*/ feeRecipient?: AztecAddress; + /** The working directory to use for simulation/proving */ + acvmWorkingDirectory?: string; + /** The path to the ACVM binary */ + acvmBinaryPath?: string; } diff --git a/yarn-project/end-to-end/Dockerfile b/yarn-project/end-to-end/Dockerfile index e1934cff7b2f..4b4d324c4091 100644 --- a/yarn-project/end-to-end/Dockerfile +++ b/yarn-project/end-to-end/Dockerfile @@ -2,6 +2,7 @@ FROM --platform=linux/amd64 aztecprotocol/bb.js as bb.js FROM --platform=linux/amd64 aztecprotocol/noir-packages as noir-packages FROM --platform=linux/amd64 aztecprotocol/l1-contracts as contracts FROM --platform=linux/amd64 aztecprotocol/noir-projects as noir-projects +FROM --platform=linux/amd64 aztecprotocol/noir as noir FROM node:18.19.0 as builder RUN apt update && apt install -y jq curl perl && rm -rf /var/lib/apt/lists/* && apt-get clean @@ -11,6 +12,8 @@ COPY --from=bb.js /usr/src/barretenberg/ts /usr/src/barretenberg/ts COPY --from=noir-packages /usr/src/noir/packages /usr/src/noir/packages COPY --from=contracts /usr/src/l1-contracts /usr/src/l1-contracts COPY --from=noir-projects /usr/src/noir-projects /usr/src/noir-projects +# We want the native ACVM binary +COPY --from=noir /usr/src/noir/noir-repo/target/release/acvm /usr/src/noir/noir-repo/target/release/acvm WORKDIR /usr/src/yarn-project COPY . . @@ -32,8 +35,8 @@ RUN ./bootstrap.sh RUN yarn workspace @aztec/end-to-end run build:web RUN yarn workspaces focus @aztec/end-to-end --production && yarn cache clean -# We no longer need nargo etc. -RUN rm -rf /usr/src/noir/noir-repo /usr/src/noir-projects /usr/src/l1-contracts +# We no longer need these +RUN rm -rf /usr/src/noir-projects /usr/src/l1-contracts # Create minimal image. 
FROM node:18.19.1-slim diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index c8a118bee3e1..a0eae07fc895 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -22,6 +22,7 @@ import { createDebugLogger, createPXEClient, deployL1Contracts, + fileURLToPath, makeFetch, waitForPXE, } from '@aztec/aztec.js'; @@ -43,6 +44,7 @@ import { import { PXEService, PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; import { SequencerClient } from '@aztec/sequencer-client'; +import * as fs from 'fs/promises'; import * as path from 'path'; import { Account, @@ -62,12 +64,35 @@ import { isMetricsLoggingRequested, setupMetricsLogger } from './logging.js'; export { deployAndInitializeTokenAndBridgeContracts } from '../shared/cross_chain_test_harness.js'; -const { PXE_URL = '' } = process.env; +const { + PXE_URL = '', + NOIR_RELEASE_DIR = 'noir-repo/target/release', + TEMP_DIR = '/tmp', + ACVM_BINARY_PATH = '', + ACVM_WORKING_DIRECTORY = '', +} = process.env; const getAztecUrl = () => { return PXE_URL; }; +// Determines if we have access to the acvm binary and a tmp folder for temp files +const getACVMConfig = async (logger: DebugLogger) => { + try { + const expectedAcvmPath = ACVM_BINARY_PATH + ? ACVM_BINARY_PATH + : `${path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../../../../noir/', NOIR_RELEASE_DIR)}/acvm`; + await fs.access(expectedAcvmPath, fs.constants.R_OK); + const acvmWorkingDirectory = ACVM_WORKING_DIRECTORY ? 
ACVM_WORKING_DIRECTORY : `${TEMP_DIR}/acvm`; + await fs.mkdir(acvmWorkingDirectory, { recursive: true }); + logger(`Using native ACVM binary at ${expectedAcvmPath} with working directory ${acvmWorkingDirectory}`); + return { acvmWorkingDirectory, expectedAcvmPath }; + } catch (err) { + logger(`Native ACVM not available, error: ${err}`); + return undefined; + } +}; + export const setupL1Contracts = async ( l1RpcUrl: string, account: HDAccount | PrivateKeyAccount, @@ -290,6 +315,13 @@ export async function setup( config.l1Contracts = deployL1ContractsValues.l1ContractAddresses; logger('Creating and synching an aztec node...'); + + const acvmConfig = await getACVMConfig(logger); + if (acvmConfig) { + config.acvmWorkingDirectory = acvmConfig.acvmWorkingDirectory; + config.acvmBinaryPath = acvmConfig.expectedAcvmPath; + } + config.l1BlockPublishRetryIntervalMS = 100; const aztecNode = await AztecNodeService.createAndSync(config); const sequencer = aztecNode.getSequencer(); diff --git a/yarn-project/end-to-end/src/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/integration_l1_publisher.test.ts index d6e628aeb661..4263a8b4d911 100644 --- a/yarn-project/end-to-end/src/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/integration_l1_publisher.test.ts @@ -39,6 +39,7 @@ import { L1Publisher, RealRollupCircuitSimulator, SoloBlockBuilder, + WASMSimulator, getL1Publisher, getVerificationKeys, makeEmptyProcessedTx as makeEmptyProcessedTxFromHistoricalTreeRoots, @@ -136,7 +137,7 @@ describe('L1Publisher integration', () => { builderDb = await MerkleTrees.new(openTmpStore()).then(t => t.asLatest()); const vks = getVerificationKeys(); - const simulator = new RealRollupCircuitSimulator(); + const simulator = new RealRollupCircuitSimulator(new WASMSimulator()); const prover = new EmptyRollupProver(); builder = new SoloBlockBuilder(builderDb, vks, simulator, prover); diff --git a/yarn-project/noir-protocol-circuits-types/package.json 
b/yarn-project/noir-protocol-circuits-types/package.json index c602ec636a39..8ca06e76d662 100644 --- a/yarn-project/noir-protocol-circuits-types/package.json +++ b/yarn-project/noir-protocol-circuits-types/package.json @@ -30,6 +30,7 @@ "@aztec/types": "workspace:^", "@noir-lang/acvm_js": "portal:../../noir/packages/acvm_js", "@noir-lang/noirc_abi": "portal:../../noir/packages/noirc_abi", + "@noir-lang/types": "portal:../../noir/packages/types", "tslib": "^2.4.0" }, "devDependencies": { diff --git a/yarn-project/noir-protocol-circuits-types/src/index.ts b/yarn-project/noir-protocol-circuits-types/src/index.ts index 3389821ab555..646839d36d0f 100644 --- a/yarn-project/noir-protocol-circuits-types/src/index.ts +++ b/yarn-project/noir-protocol-circuits-types/src/index.ts @@ -16,6 +16,7 @@ import { NoirCompiledCircuit } from '@aztec/types/noir'; import { WasmBlackBoxFunctionSolver, createBlackBoxSolver, executeCircuitWithBlackBoxSolver } from '@noir-lang/acvm_js'; import { Abi, abiDecode, abiEncode } from '@noir-lang/noirc_abi'; +import { WitnessMap } from '@noir-lang/types'; import PrivateKernelInitJson from './target/private_kernel_init.json' assert { type: 'json' }; import PrivateKernelInitSimulatedJson from './target/private_kernel_init_simulated.json' assert { type: 'json' }; @@ -23,11 +24,8 @@ import PrivateKernelInnerJson from './target/private_kernel_inner.json' assert { import PrivateKernelInnerSimulatedJson from './target/private_kernel_inner_simulated.json' assert { type: 'json' }; import PrivateKernelTailJson from './target/private_kernel_tail.json' assert { type: 'json' }; import PrivateKernelTailSimulatedJson from './target/private_kernel_tail_simulated.json' assert { type: 'json' }; -import PublicKernelAppLogicJson from './target/public_kernel_app_logic.json' assert { type: 'json' }; import PublicKernelAppLogicSimulatedJson from './target/public_kernel_app_logic_simulated.json' assert { type: 'json' }; -import PublicKernelSetupJson from 
'./target/public_kernel_setup.json' assert { type: 'json' }; import PublicKernelSetupSimulatedJson from './target/public_kernel_setup_simulated.json' assert { type: 'json' }; -import PublicKernelTeardownJson from './target/public_kernel_teardown.json' assert { type: 'json' }; import PublicKernelTeardownSimulatedJson from './target/public_kernel_teardown_simulated.json' assert { type: 'json' }; import BaseRollupSimulatedJson from './target/rollup_base_simulated.json' assert { type: 'json' }; import MergeRollupJson from './target/rollup_merge.json' assert { type: 'json' }; @@ -49,17 +47,11 @@ import { import { InputType as InitInputType, ReturnType as InitReturnType } from './types/private_kernel_init_types.js'; import { InputType as InnerInputType, ReturnType as InnerReturnType } from './types/private_kernel_inner_types.js'; import { InputType as TailInputType, ReturnType as TailReturnType } from './types/private_kernel_tail_types.js'; -import { - InputType as PublicPublicPreviousInputType, - ReturnType as PublicPublicPreviousReturnType, -} from './types/public_kernel_app_logic_types.js'; -import { - InputType as PublicSetupInputType, - ReturnType as PublicSetupReturnType, -} from './types/public_kernel_setup_types.js'; -import { InputType as BaseRollupInputType, ReturnType as BaseRollupReturnType } from './types/rollup_base_types.js'; -import { InputType as MergeRollupInputType, ReturnType as MergeRollupReturnType } from './types/rollup_merge_types.js'; -import { InputType as RootRollupInputType, ReturnType as RootRollupReturnType } from './types/rollup_root_types.js'; +import { ReturnType as PublicPublicPreviousReturnType } from './types/public_kernel_app_logic_types.js'; +import { ReturnType as PublicSetupReturnType } from './types/public_kernel_setup_types.js'; +import { ReturnType as BaseRollupReturnType } from './types/rollup_base_types.js'; +import { ReturnType as MergeRollupReturnType } from './types/rollup_merge_types.js'; +import { ReturnType as 
RootRollupReturnType } from './types/rollup_root_types.js'; // TODO(Tom): This should be exported from noirc_abi /** @@ -82,11 +74,26 @@ export const PrivateKernelInnerArtifact = PrivateKernelInnerJson as NoirCompiled export const PrivateKernelTailArtifact = PrivateKernelTailJson as NoirCompiledCircuit; -export const PublicKernelSetupArtifact = PublicKernelSetupJson as NoirCompiledCircuit; +export const PublicKernelSetupArtifact = PublicKernelSetupSimulatedJson as NoirCompiledCircuit; + +export const PublicKernelAppLogicArtifact = PublicKernelAppLogicSimulatedJson as NoirCompiledCircuit; + +export const PublicKernelTeardownArtifact = PublicKernelTeardownSimulatedJson as NoirCompiledCircuit; + +export const BaseRollupArtifact = BaseRollupSimulatedJson as NoirCompiledCircuit; + +export const MergeRollupArtifact = MergeRollupJson as NoirCompiledCircuit; -export const PublicKernelAppLogicArtifact = PublicKernelAppLogicJson as NoirCompiledCircuit; +export const RootRollupArtifact = RootRollupJson as NoirCompiledCircuit; -export const PublicKernelTeardownArtifact = PublicKernelTeardownJson as NoirCompiledCircuit; +let solver: Promise; + +const getSolver = (): Promise => { + if (!solver) { + solver = createBlackBoxSolver(); + } + return solver; +}; /** * Executes the init private kernel. @@ -105,15 +112,6 @@ export async function executeInit( return mapPrivateKernelInnerCircuitPublicInputsFromNoir(returnType); } -let solver: Promise; - -const getSolver = (): Promise => { - if (!solver) { - solver = createBlackBoxSolver(); - } - return solver; -}; - /** * Executes the inner private kernel. * @param privateKernelInnerCircuitPrivateInputs - The private inputs to the inner private kernel. @@ -148,268 +146,171 @@ export async function executeTail( } /** - * Executes the public kernel in the setup phase. - * @param publicKernelPrivateInputs - The public kernel setup circuit private inputs. - * @returns The public inputs. 
+ * Converts the inputs to the base rollup circuit into a witness map. + * @param inputs - The base rollup inputs. + * @returns The witness map */ -export async function executePublicKernelSetup( - publicKernelPrivateInputs: PublicKernelCircuitPrivateInputs, -): Promise { - const params: PublicSetupInputType = { - input: mapPublicKernelCircuitPrivateInputsToNoir(publicKernelPrivateInputs), - }; - - const returnType = await executePublicKernelSetupWithACVM(params); - - return mapPublicKernelCircuitPublicInputsFromNoir(returnType); +export function convertBaseRollupInputsToWitnessMap(inputs: BaseRollupInputs): WitnessMap { + const mapped = mapBaseRollupInputsToNoir(inputs); + const initialWitnessMap = abiEncode(BaseRollupSimulatedJson.abi as Abi, { inputs: mapped as any }); + return initialWitnessMap; } /** - * Executes the public kernel in the app logic phase. - * @param publicKernelPrivateInputs - The public kernel app logic circuit private inputs. - * @returns The public inputs. + * Converts the inputs to the merge rollup circuit into a witness map. + * @param inputs - The merge rollup inputs. + * @returns The witness map */ -export async function executePublicKernelAppLogic( - publicKernelPrivateInputs: PublicKernelCircuitPrivateInputs, -): Promise { - const params: PublicPublicPreviousInputType = { - input: mapPublicKernelCircuitPrivateInputsToNoir(publicKernelPrivateInputs), - }; - - const returnType = await executePublicKernelAppLogicWithACVM(params); - - return mapPublicKernelCircuitPublicInputsFromNoir(returnType); +export function convertMergeRollupInputsToWitnessMap(inputs: MergeRollupInputs): WitnessMap { + const mapped = mapMergeRollupInputsToNoir(inputs); + const initialWitnessMap = abiEncode(MergeRollupJson.abi as Abi, { inputs: mapped as any }); + return initialWitnessMap; } /** - * Executes the public kernel in the teardown phase. - * @param publicKernelPrivateInputs - The public kernel teardown circuit private inputs. - * @returns The public inputs. 
+ * Converts the inputs to the root rollup circuit into a witness map. + * @param inputs - The root rollup inputs. + * @returns The witness map */ -export async function executePublicKernelTeardown( - publicKernelPrivateInputs: PublicKernelCircuitPrivateInputs, -): Promise { - const params: PublicPublicPreviousInputType = { - input: mapPublicKernelCircuitPrivateInputsToNoir(publicKernelPrivateInputs), - }; - - const returnType = await executePublicKernelTeardownWithACVM(params); - - return mapPublicKernelCircuitPublicInputsFromNoir(returnType); +export function convertRootRollupInputsToWitnessMap(inputs: RootRollupInputs): WitnessMap { + const mapped = mapRootRollupInputsToNoir(inputs); + const initialWitnessMap = abiEncode(RootRollupJson.abi as Abi, { inputs: mapped as any }); + return initialWitnessMap; } - /** - * Executes the root rollup. - * @param rootRollupInputs - The root rollup inputs. - * @returns The public inputs. + * Converts the inputs to the public setup circuit into a witness map + * @param inputs - The public kernel inputs. + * @returns The witness map */ -export async function executeRootRollup(rootRollupInputs: RootRollupInputs): Promise { - const params: RootRollupInputType = { - inputs: mapRootRollupInputsToNoir(rootRollupInputs), - }; - - const returnType = await executeRootRollupWithACVM(params); - - return mapRootRollupPublicInputsFromNoir(returnType); +export function convertPublicSetupRollupInputsToWitnessMap(inputs: PublicKernelCircuitPrivateInputs): WitnessMap { + const mapped = mapPublicKernelCircuitPrivateInputsToNoir(inputs); + const initialWitnessMap = abiEncode(PublicKernelSetupSimulatedJson.abi as Abi, { input: mapped as any }); + return initialWitnessMap; } /** - * Executes the merge rollup. - * @param mergeRollupInputs - The merge rollup inputs. - * @returns The public inputs. + * Converts the inputs to the public setup circuit into a witness map + * @param inputs - The public kernel inputs. 
+ * @returns The witness map */ -export async function executeMergeRollup(mergeRollupInputs: MergeRollupInputs): Promise { - const params: MergeRollupInputType = { - inputs: mapMergeRollupInputsToNoir(mergeRollupInputs), - }; - - const returnType = await executeMergeRollupWithACVM(params); - - return mapBaseOrMergeRollupPublicInputsFromNoir(returnType); +export function convertPublicInnerRollupInputsToWitnessMap(inputs: PublicKernelCircuitPrivateInputs): WitnessMap { + const mapped = mapPublicKernelCircuitPrivateInputsToNoir(inputs); + const initialWitnessMap = abiEncode(PublicKernelAppLogicSimulatedJson.abi as Abi, { input: mapped as any }); + return initialWitnessMap; } /** - * Executes the base rollup. - * @param mergeRollupInputs - The merge rollup inputs. - * @returns The public inputs. + * Converts the inputs to the public tail circuit into a witness map + * @param inputs - The public kernel inputs. + * @returns The witness map */ -export async function executeBaseRollup(baseRollupInputs: BaseRollupInputs): Promise { - const params: BaseRollupInputType = { - inputs: mapBaseRollupInputsToNoir(baseRollupInputs), - }; - - const returnType = await executeBaseRollupWithACVM(params); - - return mapBaseOrMergeRollupPublicInputsFromNoir(returnType); +export function convertPublicTailRollupInputsToWitnessMap(inputs: PublicKernelCircuitPrivateInputs): WitnessMap { + const mapped = mapPublicKernelCircuitPrivateInputsToNoir(inputs); + const initialWitnessMap = abiEncode(PublicKernelTeardownSimulatedJson.abi as Abi, { input: mapped as any }); + return initialWitnessMap; } /** - * Executes the private init kernel with the given inputs using the acvm. - * + * Converts the outputs to the base rollup circuit. + * @param outputs - The base rollup outputs as a witness map. + * @returns The public inputs. 
*/ -async function executePrivateKernelInitWithACVM(input: InitInputType): Promise { - const initialWitnessMap = abiEncode(PrivateKernelInitSimulatedJson.abi as Abi, input as any); - - // Execute the circuit on those initial witness values - // - // Decode the bytecode from base64 since the acvm does not know about base64 encoding - const decodedBytecode = Buffer.from(PrivateKernelInitSimulatedJson.bytecode, 'base64'); - // - // Execute the circuit - const _witnessMap = await executeCircuitWithBlackBoxSolver( - await getSolver(), - decodedBytecode, - initialWitnessMap, - () => { - throw Error('unexpected oracle during execution'); - }, - ); - +export function convertBaseRollupOutputsFromWitnessMap(outputs: WitnessMap): BaseOrMergeRollupPublicInputs { // Decode the witness map into two fields, the return values and the inputs - const decodedInputs: DecodedInputs = abiDecode(PrivateKernelInitSimulatedJson.abi as Abi, _witnessMap); + const decodedInputs: DecodedInputs = abiDecode(BaseRollupSimulatedJson.abi as Abi, outputs); // Cast the inputs as the return type - return decodedInputs.return_value as InitReturnType; + const returnType = decodedInputs.return_value as BaseRollupReturnType; + + return mapBaseOrMergeRollupPublicInputsFromNoir(returnType); } /** - * Executes the private inner kernel with the given inputs using the acvm. + * Converts the outputs to the merge rollup circuit. + * @param outputs - The merge rollup outputs as a witness map. + * @returns The public inputs. 
*/ -async function executePrivateKernelInnerWithACVM(input: InnerInputType): Promise { - const initialWitnessMap = abiEncode(PrivateKernelInnerSimulatedJson.abi as Abi, input as any); - - // Execute the circuit on those initial witness values - // - // Decode the bytecode from base64 since the acvm does not know about base64 encoding - const decodedBytecode = Buffer.from(PrivateKernelInnerSimulatedJson.bytecode, 'base64'); - // - // Execute the circuit - const _witnessMap = await executeCircuitWithBlackBoxSolver( - await getSolver(), - decodedBytecode, - initialWitnessMap, - () => { - throw Error('unexpected oracle during execution'); - }, - ); - +export function convertMergeRollupOutputsFromWitnessMap(outputs: WitnessMap): BaseOrMergeRollupPublicInputs { // Decode the witness map into two fields, the return values and the inputs - const decodedInputs: DecodedInputs = abiDecode(PrivateKernelInnerSimulatedJson.abi as Abi, _witnessMap); + const decodedInputs: DecodedInputs = abiDecode(MergeRollupJson.abi as Abi, outputs); // Cast the inputs as the return type - return decodedInputs.return_value as InnerReturnType; + const returnType = decodedInputs.return_value as MergeRollupReturnType; + + return mapBaseOrMergeRollupPublicInputsFromNoir(returnType); } /** - * Executes the private tail kernel with the given inputs using the acvm. + * Converts the outputs to the root rollup circuit. + * @param outputs - The root rollup outputs as a witness map. + * @returns The public inputs. 
*/ -async function executePrivateKernelTailWithACVM(input: TailInputType): Promise { - const initialWitnessMap = abiEncode(PrivateKernelTailSimulatedJson.abi as Abi, input as any); - - // Execute the circuit on those initial witness values - // - // Decode the bytecode from base64 since the acvm does not know about base64 encoding - const decodedBytecode = Buffer.from(PrivateKernelTailSimulatedJson.bytecode, 'base64'); - // - // Execute the circuit - const _witnessMap = await executeCircuitWithBlackBoxSolver( - await getSolver(), - decodedBytecode, - initialWitnessMap, - () => { - throw Error('unexpected oracle during execution'); - }, - ); - +export function convertRootRollupOutputsFromWitnessMap(outputs: WitnessMap): RootRollupPublicInputs { // Decode the witness map into two fields, the return values and the inputs - const decodedInputs: DecodedInputs = abiDecode(PrivateKernelTailSimulatedJson.abi as Abi, _witnessMap); + const decodedInputs: DecodedInputs = abiDecode(RootRollupJson.abi as Abi, outputs); // Cast the inputs as the return type - return decodedInputs.return_value as TailReturnType; + const returnType = decodedInputs.return_value as RootRollupReturnType; + + return mapRootRollupPublicInputsFromNoir(returnType); } /** - * Executes the public setup kernel with the given inputs + * Converts the outputs to the public setup circuit. + * @param outputs - The public kernel outputs as a witness map. + * @returns The public inputs. 
*/ -async function executePublicKernelSetupWithACVM(input: PublicSetupInputType): Promise { - const initialWitnessMap = abiEncode(PublicKernelSetupSimulatedJson.abi as Abi, input as any); - const decodedBytecode = Buffer.from(PublicKernelSetupSimulatedJson.bytecode, 'base64'); - // Execute the circuit - const _witnessMap = await executeCircuitWithBlackBoxSolver( - await getSolver(), - decodedBytecode, - initialWitnessMap, - () => { - throw Error('unexpected oracle during execution'); - }, - ); - +export function convertPublicSetupRollupOutputFromWitnessMap(outputs: WitnessMap): PublicKernelCircuitPublicInputs { // Decode the witness map into two fields, the return values and the inputs - const decodedInputs: DecodedInputs = abiDecode(PublicKernelSetupSimulatedJson.abi as Abi, _witnessMap); + const decodedInputs: DecodedInputs = abiDecode(PublicKernelSetupSimulatedJson.abi as Abi, outputs); + // Cast the inputs as the return type - return decodedInputs.return_value as PublicSetupReturnType; + const returnType = decodedInputs.return_value as PublicSetupReturnType; + + return mapPublicKernelCircuitPublicInputsFromNoir(returnType); } /** - * Executes the public app logic kernel with the given inputs using the acvm. + * Converts the outputs to the public inner circuit. + * @param outputs - The public kernel outputs as a witness map. + * @returns The public inputs. 
*/ -async function executePublicKernelAppLogicWithACVM( - input: PublicPublicPreviousInputType, -): Promise { - const initialWitnessMap = abiEncode(PublicKernelAppLogicSimulatedJson.abi as Abi, input as any); - const decodedBytecode = Buffer.from(PublicKernelAppLogicSimulatedJson.bytecode, 'base64'); - // Execute the circuit - const _witnessMap = await executeCircuitWithBlackBoxSolver( - await getSolver(), - decodedBytecode, - initialWitnessMap, - () => { - throw Error('unexpected oracle during execution'); - }, - ); - +export function convertPublicInnerRollupOutputFromWitnessMap(outputs: WitnessMap): PublicKernelCircuitPublicInputs { // Decode the witness map into two fields, the return values and the inputs - const decodedInputs: DecodedInputs = abiDecode(PublicKernelAppLogicSimulatedJson.abi as Abi, _witnessMap); + const decodedInputs: DecodedInputs = abiDecode(PublicKernelAppLogicSimulatedJson.abi as Abi, outputs); // Cast the inputs as the return type - return decodedInputs.return_value as PublicPublicPreviousReturnType; + const returnType = decodedInputs.return_value as PublicPublicPreviousReturnType; + + return mapPublicKernelCircuitPublicInputsFromNoir(returnType); } /** - * Executes the public teardown kernel with the given inputs using the acvm. + * Converts the outputs to the public tail circuit. + * @param outputs - The public kernel outputs as a witness map. + * @returns The public inputs. 
*/ -async function executePublicKernelTeardownWithACVM( - input: PublicPublicPreviousInputType, -): Promise { - const initialWitnessMap = abiEncode(PublicKernelTeardownSimulatedJson.abi as Abi, input as any); - const decodedBytecode = Buffer.from(PublicKernelTeardownSimulatedJson.bytecode, 'base64'); - // Execute the circuit - const _witnessMap = await executeCircuitWithBlackBoxSolver( - await getSolver(), - decodedBytecode, - initialWitnessMap, - () => { - throw Error('unexpected oracle during execution'); - }, - ); - +export function convertPublicTailRollupOutputFromWitnessMap(outputs: WitnessMap): PublicKernelCircuitPublicInputs { // Decode the witness map into two fields, the return values and the inputs - const decodedInputs: DecodedInputs = abiDecode(PublicKernelTeardownSimulatedJson.abi as Abi, _witnessMap); + const decodedInputs: DecodedInputs = abiDecode(PublicKernelTeardownSimulatedJson.abi as Abi, outputs); // Cast the inputs as the return type - return decodedInputs.return_value as PublicPublicPreviousReturnType; + const returnType = decodedInputs.return_value as PublicPublicPreviousReturnType; + + return mapPublicKernelCircuitPublicInputsFromNoir(returnType); } /** - * Executes the root rollup with the given inputs using the acvm. + * Executes the private init kernel with the given inputs using the acvm. 
+ * */ -async function executeRootRollupWithACVM(input: RootRollupInputType): Promise { - const initialWitnessMap = abiEncode(RootRollupJson.abi as Abi, input as any); +async function executePrivateKernelInitWithACVM(input: InitInputType): Promise { + const initialWitnessMap = abiEncode(PrivateKernelInitSimulatedJson.abi as Abi, input as any); // Execute the circuit on those initial witness values // // Decode the bytecode from base64 since the acvm does not know about base64 encoding - const decodedBytecode = Buffer.from(RootRollupJson.bytecode, 'base64'); + const decodedBytecode = Buffer.from(PrivateKernelInitSimulatedJson.bytecode, 'base64'); // // Execute the circuit const _witnessMap = await executeCircuitWithBlackBoxSolver( @@ -421,22 +322,23 @@ async function executeRootRollupWithACVM(input: RootRollupInputType): Promise { - const initialWitnessMap = abiEncode(MergeRollupJson.abi as Abi, input as any); +async function executePrivateKernelInnerWithACVM(input: InnerInputType): Promise { + const initialWitnessMap = abiEncode(PrivateKernelInnerSimulatedJson.abi as Abi, input as any); // Execute the circuit on those initial witness values // // Decode the bytecode from base64 since the acvm does not know about base64 encoding - const decodedBytecode = Buffer.from(MergeRollupJson.bytecode, 'base64'); + const decodedBytecode = Buffer.from(PrivateKernelInnerSimulatedJson.bytecode, 'base64'); // // Execute the circuit const _witnessMap = await executeCircuitWithBlackBoxSolver( @@ -448,22 +350,23 @@ async function executeMergeRollupWithACVM(input: MergeRollupInputType): Promise< }, ); - const decodedInputs: DecodedInputs = abiDecode(MergeRollupJson.abi as Abi, _witnessMap); + // Decode the witness map into two fields, the return values and the inputs + const decodedInputs: DecodedInputs = abiDecode(PrivateKernelInnerSimulatedJson.abi as Abi, _witnessMap); // Cast the inputs as the return type - return decodedInputs.return_value as MergeRollupReturnType; + return 
decodedInputs.return_value as InnerReturnType; } /** - * Executes the base rollup with the given inputs using the acvm. + * Executes the private tail kernel with the given inputs using the acvm. */ -async function executeBaseRollupWithACVM(input: BaseRollupInputType): Promise { - const initialWitnessMap = abiEncode(BaseRollupSimulatedJson.abi as Abi, input as any); +async function executePrivateKernelTailWithACVM(input: TailInputType): Promise { + const initialWitnessMap = abiEncode(PrivateKernelTailSimulatedJson.abi as Abi, input as any); // Execute the circuit on those initial witness values // // Decode the bytecode from base64 since the acvm does not know about base64 encoding - const decodedBytecode = Buffer.from(BaseRollupSimulatedJson.bytecode, 'base64'); + const decodedBytecode = Buffer.from(PrivateKernelTailSimulatedJson.bytecode, 'base64'); // // Execute the circuit const _witnessMap = await executeCircuitWithBlackBoxSolver( @@ -476,8 +379,8 @@ async function executeBaseRollupWithACVM(input: BaseRollupInputType): Promise { describe('circuits simulator', () => { beforeEach(() => { - const simulator = new RealRollupCircuitSimulator(); + const simulator = new RealRollupCircuitSimulator(new WASMSimulator()); const prover = new EmptyRollupProver(); builder = new SoloBlockBuilder(builderDb, vks, simulator, prover); }); @@ -423,7 +424,7 @@ describe('sequencer/solo_block_builder', () => { // This test specifically tests nullifier values which previously caused e2e_private_token test to fail it('e2e_private_token edge case regression test on nullifier values', async () => { - const simulator = new RealRollupCircuitSimulator(); + const simulator = new RealRollupCircuitSimulator(new WASMSimulator()); const prover = new EmptyRollupProver(); builder = new SoloBlockBuilder(builderDb, vks, simulator, prover); // update the starting tree diff --git a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts 
b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts index 6e939636be1e..81dba1917d81 100644 --- a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts +++ b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts @@ -1,4 +1,5 @@ import { Body, ContractData, L2Block, MerkleTreeId, PublicDataWrite, TxEffect, TxL2Logs } from '@aztec/circuit-types'; +import { CircuitSimulationStats } from '@aztec/circuit-types/stats'; import { ARCHIVE_HEIGHT, AppendOnlyTreeSnapshot, @@ -52,6 +53,7 @@ import { padArrayEnd } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { Tuple, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize'; +import { elapsed } from '@aztec/foundation/timer'; import { MerkleTreeOperations } from '@aztec/world-state'; import chunk from 'lodash.chunk'; @@ -189,22 +191,66 @@ export class SoloBlockBuilder implements BlockBuilder { // padArrayEnd throws if the array is already full. 
Otherwise it pads till we reach the required size const newL1ToL2MessagesTuple = padArrayEnd(newL1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); - // Run the base rollup circuits for the txs - const baseRollupOutputs: [BaseOrMergeRollupPublicInputs, Proof][] = []; + // Perform all tree insertions and retrieve snapshots for all base rollups + const baseRollupInputs: BaseRollupInputs[] = []; + const treeSnapshots: Map[] = []; for (const tx of txs) { - baseRollupOutputs.push(await this.baseRollupCircuit(tx, globalVariables)); + const input = await this.buildBaseRollupInput(tx, globalVariables); + baseRollupInputs.push(input); + const promises = [ + MerkleTreeId.NOTE_HASH_TREE, + MerkleTreeId.CONTRACT_TREE, + MerkleTreeId.NULLIFIER_TREE, + MerkleTreeId.PUBLIC_DATA_TREE, + ].map(async (id: MerkleTreeId) => { + return { key: id, value: await this.getTreeSnapshot(id) }; + }); + const snapshots: Map = new Map( + (await Promise.all(promises)).map(obj => [obj.key, obj.value]), + ); + treeSnapshots.push(snapshots); + } + + // Run the base rollup circuits for the txs in parallel + const baseRollupOutputs: Promise<[BaseOrMergeRollupPublicInputs, Proof]>[] = []; + for (let i = 0; i < txs.length; i++) { + baseRollupOutputs.push(this.baseRollupCircuit(txs[i], baseRollupInputs[i], treeSnapshots[i])); } // Run merge rollups in layers until we have only two outputs - let mergeRollupInputs: [BaseOrMergeRollupPublicInputs, Proof][] = baseRollupOutputs; - let mergeRollupOutputs: [BaseOrMergeRollupPublicInputs, Proof][] = []; + // All merge circuits for each layer are simulated in parallel + const [duration, mergeInputs] = await elapsed(() => Promise.all(baseRollupOutputs)); + for (let i = 0; i < mergeInputs.length; i++) { + this.debug(`Simulated base rollup circuit`, { + eventName: 'circuit-simulation', + circuitName: 'base-rollup', + duration: duration / mergeInputs.length, + inputSize: baseRollupInputs[i].toBuffer().length, + outputSize: 
mergeInputs[i][0].toBuffer().length, + } satisfies CircuitSimulationStats); + } + let mergeRollupInputs: [BaseOrMergeRollupPublicInputs, Proof][] = mergeInputs; while (mergeRollupInputs.length > 2) { + const mergeInputStructs: MergeRollupInputs[] = []; for (const pair of chunk(mergeRollupInputs, 2)) { const [r1, r2] = pair; - mergeRollupOutputs.push(await this.mergeRollupCircuit(r1, r2)); + mergeInputStructs.push(this.createMergeRollupInputs(r1, r2)); + } + + const [duration, mergeOutputs] = await elapsed(() => + Promise.all(mergeInputStructs.map(async input => await this.mergeRollupCircuit(input))), + ); + + for (let i = 0; i < mergeOutputs.length; i++) { + this.debug(`Simulated merge rollup circuit`, { + eventName: 'circuit-simulation', + circuitName: 'merge-rollup', + duration: duration / mergeOutputs.length, + inputSize: mergeInputStructs[i].toBuffer().length, + outputSize: mergeOutputs[i][0].toBuffer().length, + } satisfies CircuitSimulationStats); } - mergeRollupInputs = mergeRollupOutputs; - mergeRollupOutputs = []; + mergeRollupInputs = mergeOutputs; } // Run the root rollup with the last two merge rollups (or base, if no merge layers) @@ -214,26 +260,29 @@ export class SoloBlockBuilder implements BlockBuilder { protected async baseRollupCircuit( tx: ProcessedTx, - globalVariables: GlobalVariables, + inputs: BaseRollupInputs, + treeSnapshots: Map, ): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { this.debug(`Running base rollup for ${tx.hash}`); - const rollupInput = await this.buildBaseRollupInput(tx, globalVariables); - const rollupOutput = await this.simulator.baseRollupCircuit(rollupInput); - await this.validatePartialState(rollupOutput.end); - const proof = await this.prover.getBaseRollupProof(rollupInput, rollupOutput); + const rollupOutput = await this.simulator.baseRollupCircuit(inputs); + this.validatePartialState(rollupOutput.end, treeSnapshots); + const proof = await this.prover.getBaseRollupProof(inputs, rollupOutput); return [rollupOutput, 
proof]; } - protected async mergeRollupCircuit( + protected createMergeRollupInputs( left: [BaseOrMergeRollupPublicInputs, Proof], right: [BaseOrMergeRollupPublicInputs, Proof], - ): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { + ) { const vk = this.getVerificationKey(left[0].rollupType); const mergeInputs = new MergeRollupInputs([ this.getPreviousRollupDataFromPublicInputs(left[0], left[1], vk), this.getPreviousRollupDataFromPublicInputs(right[0], right[1], vk), ]); + return mergeInputs; + } + protected async mergeRollupCircuit(mergeInputs: MergeRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { this.debug(`Running merge rollup circuit`); const output = await this.simulator.mergeRollupCircuit(mergeInputs); const proof = await this.prover.getMergeRollupProof(mergeInputs, output); @@ -279,40 +328,50 @@ export class SoloBlockBuilder implements BlockBuilder { return [rootOutput, rootProof]; } - protected async validatePartialState(partialState: PartialStateReference) { - await Promise.all([ - this.validateSimulatedTree( - await this.getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE), - partialState.noteHashTree, - 'NoteHashTree', - ), - this.validateSimulatedTree( - await this.getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE), - partialState.nullifierTree, - 'NullifierTree', - ), - this.validateSimulatedTree( - await this.getTreeSnapshot(MerkleTreeId.CONTRACT_TREE), - partialState.contractTree, - 'ContractTree', - ), - this.validateSimulatedTree( - await this.getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE), - partialState.publicDataTree, - 'PublicDataTree', - ), - ]); + protected validatePartialState( + partialState: PartialStateReference, + treeSnapshots: Map, + ) { + this.validateSimulatedTree( + treeSnapshots.get(MerkleTreeId.NOTE_HASH_TREE)!, + partialState.noteHashTree, + 'NoteHashTree', + ); + this.validateSimulatedTree( + treeSnapshots.get(MerkleTreeId.NULLIFIER_TREE)!, + partialState.nullifierTree, + 'NullifierTree', + ); + this.validateSimulatedTree( + 
treeSnapshots.get(MerkleTreeId.CONTRACT_TREE)!, + partialState.contractTree, + 'ContractTree', + ); + this.validateSimulatedTree( + treeSnapshots.get(MerkleTreeId.PUBLIC_DATA_TREE)!, + partialState.publicDataTree, + 'PublicDataTree', + ); } protected async validateState(state: StateReference) { - await Promise.all([ - this.validateSimulatedTree( - await this.getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE), - state.l1ToL2MessageTree, - 'L1ToL2MessageTree', - ), - this.validatePartialState(state.partial), - ]); + const promises = [ + MerkleTreeId.NOTE_HASH_TREE, + MerkleTreeId.CONTRACT_TREE, + MerkleTreeId.NULLIFIER_TREE, + MerkleTreeId.PUBLIC_DATA_TREE, + ].map(async (id: MerkleTreeId) => { + return { key: id, value: await this.getTreeSnapshot(id) }; + }); + const snapshots: Map = new Map( + (await Promise.all(promises)).map(obj => [obj.key, obj.value]), + ); + this.validatePartialState(state.partial, snapshots); + this.validateSimulatedTree( + await this.getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE), + state.l1ToL2MessageTree, + 'L1ToL2MessageTree', + ); } // Validate that the roots of all local trees match the output of the root circuit simulation diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index 242221f3f5e2..f41ed5ac7fba 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -1,7 +1,10 @@ import { ContractDataSource, L1ToL2MessageSource, L2BlockSource } from '@aztec/circuit-types'; +import { createDebugLogger } from '@aztec/foundation/log'; import { P2P } from '@aztec/p2p'; import { WorldStateSynchronizer } from '@aztec/world-state'; +import * as fs from 'fs/promises'; + import { SoloBlockBuilder } from '../block_builder/solo_block_builder.js'; import { SequencerClientConfig } from '../config.js'; import { getGlobalVariableBuilder } from 
'../global_variable_builder/index.js'; @@ -10,7 +13,32 @@ import { EmptyRollupProver } from '../prover/empty.js'; import { getL1Publisher } from '../publisher/index.js'; import { Sequencer, SequencerConfig } from '../sequencer/index.js'; import { PublicProcessorFactory } from '../sequencer/public_processor.js'; +import { NativeACVMSimulator } from '../simulator/acvm_native.js'; +import { WASMSimulator } from '../simulator/acvm_wasm.js'; import { RealRollupCircuitSimulator } from '../simulator/rollup.js'; +import { SimulationProvider } from '../simulator/simulation_provider.js'; + +const logger = createDebugLogger('aztec:sequencer-client'); + +/** + * Factory function to create a simulation provider. Will attempt to use native binary simulation falling back to WASM if unavailable. + * @param config - The provided sequencer client configuration + * @returns The constructed simulation provider + */ +async function getSimulationProvider(config: SequencerClientConfig): Promise { + if (config.acvmBinaryPath && config.acvmWorkingDirectory) { + try { + await fs.access(config.acvmBinaryPath, fs.constants.R_OK); + await fs.mkdir(config.acvmWorkingDirectory, { recursive: true }); + logger(`Using native ACVM at ${config.acvmBinaryPath}`); + return new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath); + } catch { + logger(`Failed to access ACVM at ${config.acvmBinaryPath}, falling back to WASM`); + } + } + logger('Using WASM ACVM simulation'); + return new WASMSimulator(); +} /** * Encapsulates the full sequencer and publisher. 
@@ -40,14 +68,21 @@ export class SequencerClient { const globalsBuilder = getGlobalVariableBuilder(config); const merkleTreeDb = worldStateSynchronizer.getLatest(); + const simulationProvider = await getSimulationProvider(config); + const blockBuilder = new SoloBlockBuilder( merkleTreeDb, getVerificationKeys(), - new RealRollupCircuitSimulator(), + new RealRollupCircuitSimulator(simulationProvider), new EmptyRollupProver(), ); - const publicProcessorFactory = new PublicProcessorFactory(merkleTreeDb, contractDataSource, l1ToL2MessageSource); + const publicProcessorFactory = new PublicProcessorFactory( + merkleTreeDb, + contractDataSource, + l1ToL2MessageSource, + simulationProvider, + ); const sequencer = new Sequencer( publisher, diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index 43bfa76a1758..0c6eed374ae6 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -48,6 +48,8 @@ export function getConfigEnvVars(): SequencerClientConfig { OUTBOX_CONTRACT_ADDRESS, COINBASE, FEE_RECIPIENT, + ACVM_WORKING_DIRECTORY, + ACVM_BINARY_PATH, } = process.env; const publisherPrivateKey: Hex = SEQ_PUBLISHER_PRIVATE_KEY @@ -82,5 +84,7 @@ export function getConfigEnvVars(): SequencerClientConfig { // TODO: undefined should not be allowed for the following 2 values in PROD coinbase: COINBASE ? EthAddress.fromString(COINBASE) : undefined, feeRecipient: FEE_RECIPIENT ? AztecAddress.fromString(FEE_RECIPIENT) : undefined, + acvmWorkingDirectory: ACVM_WORKING_DIRECTORY ? ACVM_WORKING_DIRECTORY : undefined, + acvmBinaryPath: ACVM_BINARY_PATH ? 
ACVM_BINARY_PATH : undefined, }; } diff --git a/yarn-project/sequencer-client/src/index.ts b/yarn-project/sequencer-client/src/index.ts index c2c445dd1da6..adae5cf85f2f 100644 --- a/yarn-project/sequencer-client/src/index.ts +++ b/yarn-project/sequencer-client/src/index.ts @@ -12,4 +12,6 @@ export * from './global_variable_builder/index.js'; export { RealRollupCircuitSimulator } from './simulator/rollup.js'; export { EmptyRollupProver } from './prover/empty.js'; export { SoloBlockBuilder } from './block_builder/solo_block_builder.js'; +export { WASMSimulator } from './simulator/acvm_wasm.js'; +export { SimulationProvider } from './simulator/simulation_provider.js'; export { makeProcessedTx, makeEmptyProcessedTx } from './sequencer/processed_tx.js'; diff --git a/yarn-project/sequencer-client/src/sequencer/public_processor.test.ts b/yarn-project/sequencer-client/src/sequencer/public_processor.test.ts index d5013bed5ed1..8affece38133 100644 --- a/yarn-project/sequencer-client/src/sequencer/public_processor.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/public_processor.test.ts @@ -48,6 +48,7 @@ import { jest } from '@jest/globals'; import { MockProxy, mock } from 'jest-mock-extended'; import { PublicProver } from '../prover/index.js'; +import { WASMSimulator } from '../simulator/acvm_wasm.js'; import { PublicKernelCircuitSimulator } from '../simulator/index.js'; import { ContractsDataSourcePublicDB, WorldStatePublicDB } from '../simulator/public_executor.js'; import { RealPublicKernelCircuitSimulator } from '../simulator/public_kernel.js'; @@ -167,7 +168,7 @@ describe('public_processor', () => { beforeEach(() => { const path = times(PUBLIC_DATA_TREE_HEIGHT, i => Buffer.alloc(32, i)); db.getSiblingPath.mockResolvedValue(new SiblingPath(PUBLIC_DATA_TREE_HEIGHT, path)); - publicKernel = new RealPublicKernelCircuitSimulator(); + publicKernel = new RealPublicKernelCircuitSimulator(new WASMSimulator()); processor = new PublicProcessor( db, publicExecutor, diff 
--git a/yarn-project/sequencer-client/src/sequencer/public_processor.ts b/yarn-project/sequencer-client/src/sequencer/public_processor.ts index 1cacdd2d5e86..c53b9b47ea88 100644 --- a/yarn-project/sequencer-client/src/sequencer/public_processor.ts +++ b/yarn-project/sequencer-client/src/sequencer/public_processor.ts @@ -11,6 +11,7 @@ import { PublicProver } from '../prover/index.js'; import { PublicKernelCircuitSimulator } from '../simulator/index.js'; import { ContractsDataSourcePublicDB, WorldStateDB, WorldStatePublicDB } from '../simulator/public_executor.js'; import { RealPublicKernelCircuitSimulator } from '../simulator/public_kernel.js'; +import { SimulationProvider } from '../simulator/simulation_provider.js'; import { AbstractPhaseManager } from './abstract_phase_manager.js'; import { PhaseManagerFactory } from './phase_manager_factory.js'; import { FailedTx, ProcessedTx, makeEmptyProcessedTx, makeProcessedTx } from './processed_tx.js'; @@ -23,6 +24,7 @@ export class PublicProcessorFactory { private merkleTree: MerkleTreeOperations, private contractDataSource: ContractDataSource, private l1Tol2MessagesDataSource: L1ToL2MessageSource, + private simulator: SimulationProvider, ) {} /** @@ -45,7 +47,7 @@ export class PublicProcessorFactory { return new PublicProcessor( this.merkleTree, publicExecutor, - new RealPublicKernelCircuitSimulator(), + new RealPublicKernelCircuitSimulator(this.simulator), new EmptyPublicProver(), globalVariables, historicalHeader, diff --git a/yarn-project/sequencer-client/src/simulator/acvm_native.ts b/yarn-project/sequencer-client/src/simulator/acvm_native.ts new file mode 100644 index 000000000000..47c1c5e6d489 --- /dev/null +++ b/yarn-project/sequencer-client/src/simulator/acvm_native.ts @@ -0,0 +1,112 @@ +import { randomBytes } from '@aztec/foundation/crypto'; +import { NoirCompiledCircuit } from '@aztec/types/noir'; + +import { WitnessMap } from '@noir-lang/types'; +import * as proc from 'child_process'; +import fs from 
'fs/promises'; + +import { SimulationProvider } from './simulation_provider.js'; + +/** + * Parses a TOML format witness map string into a Map structure + * @param outputString - The witness map in TOML format + * @returns The parsed witness map + */ +function parseIntoWitnessMap(outputString: string) { + const lines = outputString.split('\n'); + return new Map( + lines + .filter((line: string) => line.length) + .map((line: string) => { + const pair = line.replaceAll(' ', '').split('='); + return [Number(pair[0]), pair[1].replaceAll('"', '')]; + }), + ); +} + +/** + * + * @param inputWitness - The circuit's input witness + * @param bytecode - The circuit buytecode + * @param workingDirectory - A directory to use for temporary files by the ACVM + * @param pathToAcvm - The path to the ACVm binary + * @returns The completed partial witness outputted from the circuit + */ +export async function executeNativeCircuit( + inputWitness: WitnessMap, + bytecode: Buffer, + workingDirectory: string, + pathToAcvm: string, +) { + const bytecodeFilename = 'bytecode'; + const witnessFilename = 'input_witness.toml'; + + // convert the witness map to TOML format + let witnessMap = ''; + inputWitness.forEach((value: string, key: number) => { + witnessMap = witnessMap.concat(`${key} = '${value}'\n`); + }); + + // In case the directory is still around from some time previously, remove it + await fs.rm(workingDirectory, { recursive: true, force: true }); + // Create the new working directory + await fs.mkdir(workingDirectory, { recursive: true }); + // Write the bytecode and input witness to the working directory + await fs.writeFile(`${workingDirectory}/${bytecodeFilename}`, bytecode); + await fs.writeFile(`${workingDirectory}/${witnessFilename}`, witnessMap); + + // Execute the ACVM using the given args + const args = [ + `execute`, + `--working-directory`, + `${workingDirectory}`, + `--bytecode`, + `${bytecodeFilename}`, + `--input-witness`, + `${witnessFilename}`, + `--print`, + ]; + 
const processPromise = new Promise((resolve, reject) => { + let outputWitness = Buffer.alloc(0); + let errorBuffer = Buffer.alloc(0); + const acvm = proc.spawn(pathToAcvm, args); + acvm.stdout.on('data', data => { + outputWitness = Buffer.concat([outputWitness, data]); + }); + acvm.stderr.on('data', data => { + errorBuffer = Buffer.concat([errorBuffer, data]); + }); + acvm.on('close', code => { + if (code === 0) { + resolve(outputWitness.toString('utf-8')); + } else { + reject(errorBuffer.toString('utf-8')); + } + }); + }); + + try { + const output = await processPromise; + return parseIntoWitnessMap(output); + } finally { + // Clean up the working directory before we leave + await fs.rm(workingDirectory, { recursive: true, force: true }); + } +} + +export class NativeACVMSimulator implements SimulationProvider { + constructor(private workingDirectory: string, private pathToAcvm: string) {} + async simulateCircuit(input: WitnessMap, compiledCircuit: NoirCompiledCircuit): Promise { + // Execute the circuit on those initial witness values + + // Decode the bytecode from base64 since the acvm does not know about base64 encoding + const decodedBytecode = Buffer.from(compiledCircuit.bytecode, 'base64'); + + // Provide a unique working directory so we don't get clashes with parallel executions + const directory = `${this.workingDirectory}/${randomBytes(32).toString('hex')}`; + // Execute the circuit + const _witnessMap = await executeNativeCircuit(input, decodedBytecode, directory, this.pathToAcvm); + + return _witnessMap; + } +} diff --git a/yarn-project/sequencer-client/src/simulator/acvm_wasm.ts b/yarn-project/sequencer-client/src/simulator/acvm_wasm.ts new file mode 100644 index 000000000000..cdf49df7f5af --- /dev/null +++ b/yarn-project/sequencer-client/src/simulator/acvm_wasm.ts @@ -0,0 +1,31 @@ +import { NoirCompiledCircuit } from '@aztec/types/noir'; + +import { WasmBlackBoxFunctionSolver, createBlackBoxSolver, executeCircuitWithBlackBoxSolver } from 
'@noir-lang/acvm_js'; +import { WitnessMap } from '@noir-lang/types'; + +import { SimulationProvider } from './simulation_provider.js'; + +let solver: Promise; + +const getSolver = (): Promise => { + if (!solver) { + solver = createBlackBoxSolver(); + } + return solver; +}; + +export class WASMSimulator implements SimulationProvider { + async simulateCircuit(input: WitnessMap, compiledCircuit: NoirCompiledCircuit): Promise { + // Execute the circuit on those initial witness values + // + // Decode the bytecode from base64 since the acvm does not know about base64 encoding + const decodedBytecode = Buffer.from(compiledCircuit.bytecode, 'base64'); + // + // Execute the circuit + const _witnessMap = await executeCircuitWithBlackBoxSolver(await getSolver(), decodedBytecode, input, () => { + throw Error('unexpected oracle during execution'); + }); + + return _witnessMap; + } +} diff --git a/yarn-project/sequencer-client/src/simulator/index.ts b/yarn-project/sequencer-client/src/simulator/index.ts index 7bc2504999ee..38a8b441e4ee 100644 --- a/yarn-project/sequencer-client/src/simulator/index.ts +++ b/yarn-project/sequencer-client/src/simulator/index.ts @@ -55,3 +55,4 @@ export interface PublicKernelCircuitSimulator { */ publicKernelCircuitTeardown(inputs: PublicKernelCircuitPrivateInputs): Promise; } +export * from './acvm_wasm.js'; diff --git a/yarn-project/sequencer-client/src/simulator/public_kernel.ts b/yarn-project/sequencer-client/src/simulator/public_kernel.ts index 2c67a5d6c051..fd4681cbed8d 100644 --- a/yarn-project/sequencer-client/src/simulator/public_kernel.ts +++ b/yarn-project/sequencer-client/src/simulator/public_kernel.ts @@ -3,12 +3,19 @@ import { PublicKernelCircuitPrivateInputs, PublicKernelCircuitPublicInputs } fro import { createDebugLogger } from '@aztec/foundation/log'; import { elapsed } from '@aztec/foundation/timer'; import { - executePublicKernelAppLogic, - executePublicKernelSetup, - executePublicKernelTeardown, + PublicKernelAppLogicArtifact, 
+ PublicKernelSetupArtifact, + PublicKernelTeardownArtifact, + convertPublicInnerRollupInputsToWitnessMap, + convertPublicInnerRollupOutputFromWitnessMap, + convertPublicSetupRollupInputsToWitnessMap, + convertPublicSetupRollupOutputFromWitnessMap, + convertPublicTailRollupInputsToWitnessMap, + convertPublicTailRollupOutputFromWitnessMap, } from '@aztec/noir-protocol-circuits-types'; -import { PublicKernelCircuitSimulator } from './index.js'; +import { PublicKernelCircuitSimulator, WASMSimulator } from './index.js'; +import { SimulationProvider } from './simulation_provider.js'; /** * Implements the PublicKernelCircuitSimulator. @@ -16,6 +23,11 @@ import { PublicKernelCircuitSimulator } from './index.js'; export class RealPublicKernelCircuitSimulator implements PublicKernelCircuitSimulator { private log = createDebugLogger('aztec:public-kernel-simulator'); + // Some circuits are so small it is faster to use WASM + private wasmSimulator: WASMSimulator = new WASMSimulator(); + + constructor(private simulator: SimulationProvider) {} + /** * Simulates the public kernel setup circuit from its inputs. * @param input - Inputs to the circuit. 
@@ -27,7 +39,11 @@ export class RealPublicKernelCircuitSimulator implements PublicKernelCircuitSimu if (!input.previousKernel.publicInputs.needsSetup) { throw new Error(`Expected previous kernel inputs to need setup`); } - const [duration, result] = await elapsed(() => executePublicKernelSetup(input)); + const inputWitness = convertPublicSetupRollupInputsToWitnessMap(input); + const [duration, witness] = await elapsed(() => + this.wasmSimulator.simulateCircuit(inputWitness, PublicKernelSetupArtifact), + ); + const result = convertPublicSetupRollupOutputFromWitnessMap(witness); this.log(`Simulated public kernel setup circuit`, { eventName: 'circuit-simulation', circuitName: 'public-kernel-setup', @@ -49,7 +65,11 @@ export class RealPublicKernelCircuitSimulator implements PublicKernelCircuitSimu if (!input.previousKernel.publicInputs.needsAppLogic) { throw new Error(`Expected previous kernel inputs to need app logic`); } - const [duration, result] = await elapsed(() => executePublicKernelAppLogic(input)); + const inputWitness = convertPublicInnerRollupInputsToWitnessMap(input); + const [duration, witness] = await elapsed(() => + this.wasmSimulator.simulateCircuit(inputWitness, PublicKernelAppLogicArtifact), + ); + const result = convertPublicInnerRollupOutputFromWitnessMap(witness); this.log(`Simulated public kernel app logic circuit`, { eventName: 'circuit-simulation', circuitName: 'public-kernel-app-logic', @@ -71,7 +91,11 @@ export class RealPublicKernelCircuitSimulator implements PublicKernelCircuitSimu if (!input.previousKernel.publicInputs.needsTeardown) { throw new Error(`Expected previous kernel inputs to need teardown`); } - const [duration, result] = await elapsed(() => executePublicKernelTeardown(input)); + const inputWitness = convertPublicTailRollupInputsToWitnessMap(input); + const [duration, witness] = await elapsed(() => + this.wasmSimulator.simulateCircuit(inputWitness, PublicKernelTeardownArtifact), + ); + const result = 
convertPublicTailRollupOutputFromWitnessMap(witness); this.log(`Simulated public kernel teardown circuit`, { eventName: 'circuit-simulation', circuitName: 'public-kernel-teardown', diff --git a/yarn-project/sequencer-client/src/simulator/rollup.ts b/yarn-project/sequencer-client/src/simulator/rollup.ts index 302d8d40be26..02dbc9a55354 100644 --- a/yarn-project/sequencer-client/src/simulator/rollup.ts +++ b/yarn-project/sequencer-client/src/simulator/rollup.ts @@ -8,9 +8,20 @@ import { } from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; import { elapsed } from '@aztec/foundation/timer'; -import { executeBaseRollup, executeMergeRollup, executeRootRollup } from '@aztec/noir-protocol-circuits-types'; +import { + BaseRollupArtifact, + MergeRollupArtifact, + RootRollupArtifact, + convertBaseRollupInputsToWitnessMap, + convertBaseRollupOutputsFromWitnessMap, + convertMergeRollupInputsToWitnessMap, + convertMergeRollupOutputsFromWitnessMap, + convertRootRollupInputsToWitnessMap, + convertRootRollupOutputsFromWitnessMap, +} from '@aztec/noir-protocol-circuits-types'; -import { RollupSimulator } from './index.js'; +import { RollupSimulator, WASMSimulator } from './index.js'; +import { SimulationProvider } from './simulation_provider.js'; /** * Implements the rollup circuit simulator. @@ -18,21 +29,22 @@ import { RollupSimulator } from './index.js'; export class RealRollupCircuitSimulator implements RollupSimulator { private log = createDebugLogger('aztec:rollup-simulator'); + // Some circuits are so small it is faster to use WASM + private wasmSimulator: WASMSimulator = new WASMSimulator(); + + constructor(private simulationProvider: SimulationProvider) {} + /** * Simulates the base rollup circuit from its inputs. * @param input - Inputs to the circuit. * @returns The public inputs as outputs of the simulation. 
*/ public async baseRollupCircuit(input: BaseRollupInputs): Promise { - const [duration, result] = await elapsed(() => executeBaseRollup(input)); + const witnessMap = convertBaseRollupInputsToWitnessMap(input); - this.log(`Simulated base rollup circuit`, { - eventName: 'circuit-simulation', - circuitName: 'base-rollup', - duration, - inputSize: input.toBuffer().length, - outputSize: result.toBuffer().length, - } satisfies CircuitSimulationStats); + const witness = await this.simulationProvider.simulateCircuit(witnessMap, BaseRollupArtifact); + + const result = convertBaseRollupOutputsFromWitnessMap(witness); return Promise.resolve(result); } @@ -42,15 +54,11 @@ export class RealRollupCircuitSimulator implements RollupSimulator { * @returns The public inputs as outputs of the simulation. */ public async mergeRollupCircuit(input: MergeRollupInputs): Promise { - const [duration, result] = await elapsed(() => executeMergeRollup(input)); + const witnessMap = convertMergeRollupInputsToWitnessMap(input); - this.log(`Simulated merge rollup circuit`, { - eventName: 'circuit-simulation', - circuitName: 'merge-rollup', - duration, - inputSize: input.toBuffer().length, - outputSize: result.toBuffer().length, - } satisfies CircuitSimulationStats); + const witness = await this.wasmSimulator.simulateCircuit(witnessMap, MergeRollupArtifact); + + const result = convertMergeRollupOutputsFromWitnessMap(witness); return result; } @@ -61,7 +69,11 @@ export class RealRollupCircuitSimulator implements RollupSimulator { * @returns The public inputs as outputs of the simulation. 
*/ public async rootRollupCircuit(input: RootRollupInputs): Promise { - const [duration, result] = await elapsed(() => executeRootRollup(input)); + const witnessMap = convertRootRollupInputsToWitnessMap(input); + + const [duration, witness] = await elapsed(() => this.wasmSimulator.simulateCircuit(witnessMap, RootRollupArtifact)); + + const result = convertRootRollupOutputsFromWitnessMap(witness); this.log(`Simulated root rollup circuit`, { eventName: 'circuit-simulation', diff --git a/yarn-project/sequencer-client/src/simulator/simulation_provider.ts b/yarn-project/sequencer-client/src/simulator/simulation_provider.ts new file mode 100644 index 000000000000..a9fd92663517 --- /dev/null +++ b/yarn-project/sequencer-client/src/simulator/simulation_provider.ts @@ -0,0 +1,10 @@ +import { NoirCompiledCircuit } from '@aztec/types/noir'; + +import { WitnessMap } from '@noir-lang/types'; + +/** + * Low level simulation interface + */ +export interface SimulationProvider { + simulateCircuit(input: WitnessMap, compiledCircuit: NoirCompiledCircuit): Promise; +} diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index ba2110329892..543a2887af70 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -658,6 +658,7 @@ __metadata: "@jest/globals": ^29.5.0 "@noir-lang/acvm_js": "portal:../../noir/packages/acvm_js" "@noir-lang/noirc_abi": "portal:../../noir/packages/noirc_abi" + "@noir-lang/types": "portal:../../noir/packages/types" "@types/jest": ^29.5.0 "@types/node": ^18.7.23 jest: ^29.5.0 @@ -839,6 +840,8 @@ __metadata: "@aztec/types": "workspace:^" "@aztec/world-state": "workspace:^" "@jest/globals": ^29.5.0 + "@noir-lang/acvm_js": "portal:../../noir/packages/acvm_js" + "@noir-lang/types": "portal:../../noir/packages/types" "@types/jest": ^29.5.0 "@types/levelup": ^5.1.2 "@types/lodash.chunk": ^4.2.7 diff --git a/yarn.lock b/yarn.lock new file mode 100644 index 000000000000..fb57ccd13afb --- /dev/null +++ b/yarn.lock @@ -0,0 +1,4 @@ +# THIS IS AN 
AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + +