diff --git a/barretenberg/acir_tests/README.md b/barretenberg/acir_tests/README.md index 8b8eb8a554c8..11fa953b54ff 100644 --- a/barretenberg/acir_tests/README.md +++ b/barretenberg/acir_tests/README.md @@ -1,66 +1,46 @@ -# Acir Test Vector Runner +# ACIR Tests -The aim is to verify acir tests verify through a given backend binary. "Backend binaries" can include e.g.: +- Copies test programs from Noir (in `acir_tests/`) and compiles them +- The scripts/ folder assists bootstrap.sh in defining test scenarios involving compiled private Noir function artifacts (the bytecode being in ACIR format, hence the name of this module) +- The bootstrap.sh script is the source of truth for which proving modes are tested, e.g. solidity-friendly ultra honk uses --oracle_hash keccak. -- bb (native CLI) -- bb.js (typescript CLI) -- bb.js-dev (symlink in your PATH that runs the typescript CLI via ts-node) -- bb.js.browser (script in `headless-test` that runs a test through bb.js in a browser instance via playwright) +## Quick Start -## Building the tests. - -To build all the tests: - -``` +```bash +# Build all the test programs ./bootstrap.sh -``` -This will clone the acir test vectors from the noir repo, removing any that are not relevent. -It will then compile them all using local repo versions of nargo and bb (used for generating recursive inputs). - -## Running the tests. - -``` +# Run all tests ./bootstrap.sh test ``` -This will run all the tests as returned by `./bootstrap.sh test_cmds`. - -To run a single test you can: +## Running Specific Tests -``` -./run_test.sh -``` +The easiest way to find how to run specific test(s): -By default this will use the native binary `../cpp/build/bin/bb` and the `prove_and_verify` flow. +```bash +# See all available test commands +./bootstrap.sh test_cmds -You can substitute the backend binary using the `BIN` environment variable. -You can turn on logging with `VERBOSE` environment variable. 
-You can specify which proving system to use with the `SYS` variable (ultra_honk, ultra_rollup_honk, mega_honk). -If not specified it defaults to plonk (TODO: Make explicit). - -``` -$ SYS=ultra_honk BIN=bb.js VERBOSE=1 ./run_test.sh a_1_mul +# Find a specific test +./bootstrap.sh test_cmds | grep assert_statement ``` -You can use a relative path to an executable. e.g. if bb.js-dev is not symlinked into your PATH: - +This will show you the exact commands used in CI. For example: ``` -$ BIN=../ts/bb.js-dev VERBOSE=1 ./run_test.sh a_1_mul +c5f89...:ISOLATE=1 scripts/bb_prove_sol_verify.sh assert_statement --disable_zk +c5f89...:ISOLATE=1 scripts/bb_prove_sol_verify.sh assert_statement +c5f89... scripts/bb_prove_bbjs_verify.sh assert_statement +c5f89... scripts/bb_prove.sh assert_statement ``` -``` -$ BIN=./headless-test/bb.js.browser VERBOSE=1 ./run_test.sh a_1_mul +You can run any of these commands directly (ignore the hash prefix): +```bash +scripts/bb_prove.sh assert_statement ``` -You can specify a different testing "flow" with `FLOW` environment variable. Flows are in the `flows` dir. -The default flow is `prove_and_verify`, which is the quickest way to... prove and verify. It's used to test the acir -test vectors actually all pass in whichever version of the backend is being run. -The `all_cmds` flow tests all the supported commands on the binary. Slower, but is there to test the cli. +Programmatically, you can also do from root: +```bash +./barretenberg/acir_tests/bootstrap.sh test_cmds | grep assert_statement | ci3/parallelise ``` -$ FLOW=all_cmds ./run_acir_tests.sh a_1_mul -``` - -We currently have to use a separate flow script to run client_ivc scheme as opposed to just setting `SYS` due to -how cli commands are handled non-uniformly. 
diff --git a/barretenberg/acir_tests/bootstrap.sh b/barretenberg/acir_tests/bootstrap.sh index 5decf536abbf..7999f857d833 100755 --- a/barretenberg/acir_tests/bootstrap.sh +++ b/barretenberg/acir_tests/bootstrap.sh @@ -3,8 +3,6 @@ source $(git rev-parse --show-toplevel)/ci3/source_bootstrap cmd=${1:-} export CRS_PATH=$HOME/.bb-crs -native_build_dir=$(../cpp/scripts/native-preset-build-dir) -export bb=$(realpath ../cpp/$native_build_dir/bin/bb) tests_tar=barretenberg-acir-tests-$(hash_str \ $(../../noir/bootstrap.sh hash-tests) \ @@ -26,6 +24,8 @@ tests_hash=$(hash_str \ # Generate inputs for a given recursively verifying program. function run_proof_generation { local program=$1 + local native_build_dir=$(../cpp/scripts/native-preset-build-dir) + local bb=$(realpath ../cpp/$native_build_dir/bin/bb) local outdir=$(mktemp -d) trap "rm -rf $outdir" EXIT local adjustment=16 @@ -44,7 +44,7 @@ function run_proof_generation { if [[ $program == *"zk"* ]]; then disable_zk="" fi - local prove_cmd="$bb prove --scheme ultra_honk $disable_zk --init_kzg_accumulator $ipa_accumulation_flag --output_format fields --write_vk -o $outdir -b ./target/program.json -w ./target/witness.gz" + local prove_cmd="$bb prove --scheme ultra_honk $disable_zk $ipa_accumulation_flag --output_format fields --write_vk -o $outdir -b ./target/program.json -w ./target/witness.gz" echo_stderr "$prove_cmd" dump_fail "$prove_cmd" @@ -81,12 +81,24 @@ function generate_toml { function regenerate_recursive_inputs { local program=$1 # Compile the assert_statement test as it's used for the recursive tests. - COMPILE=2 ./scripts/run_test.sh assert_statement + cd ./acir_tests/assert_statement + local nargo=$(realpath ../../../../noir/noir-repo/target/release/nargo) + rm -rf target + $nargo compile --silence-warnings && $nargo execute + mv ./target/assert_statement.json ./target/program.json + mv ./target/assert_statement.gz ./target/witness.gz + cd ../.. 
parallel 'run_proof_generation {}' ::: $(ls internal_test_programs) } export -f regenerate_recursive_inputs run_proof_generation generate_toml +function compile { + echo_header "Compiling acir_tests" + local nargo=$(realpath ../../noir/noir-repo/target/release/nargo) + denoise "parallel --joblog joblog.txt --line-buffered 'cd {} && rm -rf target && $nargo compile --silence-warnings && $nargo execute && mv ./target/\$(basename {}).json ./target/program.json && mv ./target/\$(basename {}).gz ./target/witness.gz' ::: ./acir_tests/*" +} + function build { echo_header "acir_tests build" @@ -107,9 +119,8 @@ function build { # Generates the Prover.toml files for the recursive tests from the assert_statement test. denoise regenerate_recursive_inputs - # COMPILE=2 only compiles the test. - denoise "parallel --joblog joblog.txt --line-buffered 'COMPILE=2 ./scripts/run_test.sh \$(basename {})' ::: ./acir_tests/*" - + # Compile all tests + compile cache_upload $tests_tar acir_tests fi @@ -127,85 +138,76 @@ function test { # Paths are all relative to the repository root. # this function is used to generate the commands for running the tests. function test_cmds { + # NOTE: client-ivc commands are tested in yarn-project/end-to-end bench due to circular dependencies. + # Locally, you can do ./bootstrap.sh bench_ivc to run the 'tests' (benches with validation) + # non_recursive_tests include all of the non recursive test programs local non_recursive_tests=$(find ./acir_tests -maxdepth 1 -mindepth 1 -type d | \ grep -vE 'verify_honk_proof|verify_honk_zk_proof|verify_rollup_honk_proof') - local run_test=$(realpath --relative-to=$root ./scripts/run_test.sh) - local run_test_browser=$(realpath --relative-to=$root ./scripts/run_test_browser.sh) - local bbjs_bin="../ts/dest/node/main.js" + local scripts=$(realpath --relative-to=$root scripts) + local sol_prefix="$tests_hash:ISOLATE=1" # Solidity tests. Isolate because anvil. 
- local prefix="$tests_hash:ISOLATE=1" - echo "$prefix FLOW=sol_honk $run_test assert_statement" - echo "$prefix FLOW=sol_honk $run_test a_1_mul" - echo "$prefix FLOW=sol_honk $run_test slices" - echo "$prefix FLOW=sol_honk $run_test verify_honk_proof" - echo "$prefix FLOW=sol_honk_zk $run_test assert_statement" - echo "$prefix FLOW=sol_honk_zk $run_test a_1_mul" - echo "$prefix FLOW=sol_honk_zk $run_test slices" - echo "$prefix FLOW=sol_honk_zk $run_test verify_honk_proof" + # Test the solidity verifier with and without zk + for t in assert_statement a_1_mul slices verify_honk_proof; do + echo "$sol_prefix $scripts/bb_prove_sol_verify.sh $t --disable_zk" + echo "$sol_prefix $scripts/bb_prove_sol_verify.sh $t" + done + # prove with bb cli and verify with bb.js classes + echo "$sol_prefix $scripts/bb_prove_bbjs_verify.sh a_1_mul" + echo "$sol_prefix $scripts/bb_prove_bbjs_verify.sh assert_statement" # bb.js browser tests. Isolate because server. - local prefix="$tests_hash:ISOLATE=1:NET=1:CPUS=8" - echo "$prefix:NAME=chrome_verify_honk_proof BROWSER=chrome $run_test_browser verify_honk_proof" - echo "$prefix:NAME=chrome_a_1_mul BROWSER=chrome $run_test_browser a_1_mul" - echo "$prefix:NAME=webkit_verify_honk_proof BROWSER=webkit $run_test_browser verify_honk_proof" - echo "$prefix:NAME=webkit_a_1_mul BROWSER=webkit $run_test_browser a_1_mul" + local browser_prefix="$tests_hash:ISOLATE=1:NET=1:CPUS=8" + echo "$browser_prefix:NAME=chrome_verify_honk_proof $scripts/browser_prove.sh verify_honk_proof chrome" + echo "$browser_prefix:NAME=chrome_a_1_mul $scripts/browser_prove.sh a_1_mul chrome" + echo "$browser_prefix:NAME=webkit_verify_honk_proof $scripts/browser_prove.sh verify_honk_proof webkit" + echo "$browser_prefix:NAME=webkit_a_1_mul $scripts/browser_prove.sh a_1_mul webkit" # bb.js tests. - local prefix=$tests_hash # ecdsa_secp256r1_3x through bb.js on node to check 256k support. 
- echo "$prefix BIN=$bbjs_bin SYS=ultra_honk_deprecated FLOW=prove_then_verify $run_test ecdsa_secp256r1_3x" + echo "$tests_hash $scripts/bbjs_prove.sh ecdsa_secp256r1_3x" # the prove then verify flow for UltraHonk. This makes sure we have the same circuit for different witness inputs. - echo "$prefix BIN=$bbjs_bin SYS=ultra_honk_deprecated FLOW=prove_then_verify $run_test a_6_array" + echo "$tests_hash $scripts/bbjs_prove.sh a_6_array" - # barretenberg-acir-tests-bb: # Fold and verify an ACIR program stack using ClientIVC, recursively verify as part of the Tube circuit and produce and verify a Honk proof - echo "$prefix FLOW=prove_then_verify_tube $run_test a_6_array" + echo "$tests_hash $scripts/bb_tube_prove.sh a_6_array" - # barretenberg-acir-tests-bb-ultra-honk: - # SYS decides which scheme will be used for the test. - # FLOW decides which script (prove, verify, prove_then_verify, etc.) will be ran for t in $non_recursive_tests; do - echo "$prefix SYS=ultra_honk FLOW=prove_then_verify $run_test $(basename $t)" + echo "$tests_hash $scripts/bb_prove.sh $(basename $t)" done - echo "$prefix SYS=ultra_honk FLOW=prove_then_verify $run_test assert_statement" + echo "$tests_hash $scripts/bb_prove.sh assert_statement" # Run the UH recursive verifier tests with ZK. - echo "$prefix SYS=ultra_honk FLOW=prove_then_verify $run_test verify_honk_proof" - echo "$prefix SYS=ultra_honk FLOW=prove_then_verify $run_test double_verify_honk_proof" + echo "$tests_hash $scripts/bb_prove.sh verify_honk_proof" + echo "$tests_hash $scripts/bb_prove.sh double_verify_honk_proof" # Run the UH recursive verifier tests without ZK. - echo "$prefix SYS=ultra_honk FLOW=prove_then_verify DISABLE_ZK=true $run_test double_verify_honk_proof" + echo "$tests_hash $scripts/bb_prove.sh double_verify_honk_proof --disable_zk" # Run the ZK UH recursive verifier tests. 
- echo "$prefix SYS=ultra_honk FLOW=prove_then_verify $run_test double_verify_honk_zk_proof" + echo "$tests_hash $scripts/bb_prove.sh double_verify_honk_zk_proof" # Run the ZK UH recursive verifier tests without ZK. - echo "$prefix SYS=ultra_honk FLOW=prove_then_verify DISABLE_ZK=true $run_test double_verify_honk_zk_proof" + echo "$tests_hash $scripts/bb_prove.sh double_verify_honk_zk_proof --disable_zk" - echo "$prefix SYS=ultra_honk FLOW=prove_then_verify HASH=keccak $run_test assert_statement" - # echo "$prefix SYS=ultra_honk FLOW=prove_then_verify HASH=starknet $run_test assert_statement" - echo "$prefix SYS=ultra_honk FLOW=prove_then_verify ROLLUP=true $run_test verify_rollup_honk_proof" - # Run the assert_statement test with the --disable_zk flag. - echo "$prefix SYS=ultra_honk FLOW=prove_then_verify DISABLE_ZK=true $run_test assert_statement" + echo "$tests_hash $scripts/bb_prove.sh assert_statement --oracle_hash keccak" + # If starknet enabled: + #echo "$tests_hash $scripts/bb_prove.sh assert_statement --oracle_hash starknet" + # Test rollup verification (rollup uses --ipa_accumulation) + echo "$tests_hash $scripts/bb_prove.sh verify_rollup_honk_proof --ipa_accumulation" + # Run the assert_statement test with ZK disabled. 
+ echo "$tests_hash $scripts/bb_prove.sh assert_statement --disable_zk" # prove and verify using bb.js classes - echo "$prefix SYS=ultra_honk FLOW=bbjs_prove_verify $run_test a_1_mul" - echo "$prefix SYS=ultra_honk FLOW=bbjs_prove_verify $run_test assert_statement" - - # prove with bb.js and verify with solidity verifier - echo "$prefix SYS=ultra_honk FLOW=bbjs_prove_sol_verify $run_test a_1_mul" - echo "$prefix SYS=ultra_honk FLOW=bbjs_prove_sol_verify $run_test assert_statement" - - # prove with bb cli and verify with bb.js classes - echo "$prefix SYS=ultra_honk FLOW=bb_prove_bbjs_verify $run_test a_1_mul" - echo "$prefix SYS=ultra_honk FLOW=bb_prove_bbjs_verify $run_test assert_statement" + echo "$tests_hash $scripts/bbjs_prove.sh a_1_mul" + echo "$tests_hash $scripts/bbjs_prove.sh assert_statement" # prove with bb.js and verify with bb cli - echo "$prefix SYS=ultra_honk FLOW=bbjs_prove_bb_verify $run_test a_1_mul" - echo "$prefix SYS=ultra_honk FLOW=bbjs_prove_bb_verify $run_test assert_statement" + echo "$tests_hash $scripts/bbjs_prove_bb_verify.sh a_1_mul" + echo "$tests_hash $scripts/bbjs_prove_bb_verify.sh assert_statement" } function bench_cmds { + local dir=$(realpath --relative-to=$root .) 
echo "$tests_hash:CPUS=16 barretenberg/acir_tests/scripts/run_bench.sh ultra_honk_rec_wasm_memory" \ - "'BIN=../ts/dest/node/main.js SYS=ultra_honk_deprecated FLOW=prove_then_verify ./scripts/run_test.sh verify_honk_proof'" + "'scripts/bbjs_legacy_cli_prove.sh verify_honk_proof'" } # TODO(https://github.com/AztecProtocol/barretenberg/issues/1254): More complete testing, including failure tests @@ -228,6 +230,9 @@ case "$cmd" in "hash") echo $tests_hash ;; + "compile") + compile + ;; test|test_cmds|bench|bench_cmds) $cmd ;; diff --git a/barretenberg/acir_tests/flows/all_cmds.sh b/barretenberg/acir_tests/flows/all_cmds.sh deleted file mode 100755 index 7d1a9722d538..000000000000 --- a/barretenberg/acir_tests/flows/all_cmds.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/sh -set -eu - -VFLAG=${VERBOSE:+-v} -BFLAG="-b ./target/program.json" -FLAGS="-c $CRS_PATH $VFLAG" -# the commands called here are subcommands of the OLD_API command in the native bb binary, -# but no such refactoring was done to the node binary. This is because the node binary is -# deprecated and UltraPlonk is also deprecated. -MAYBE_OLD_API=${NATIVE:+OLD_API} - -# Test we can perform the proof/verify flow. -$BIN $MAYBE_OLD_API gates $FLAGS $BFLAG > /dev/null -$BIN $MAYBE_OLD_API prove -o proof $FLAGS $BFLAG -$BIN $MAYBE_OLD_API write_vk -o vk $FLAGS $BFLAG -$BIN $MAYBE_OLD_API write_pk -o pk $FLAGS $BFLAG -$BIN $MAYBE_OLD_API verify -k vk -p proof $FLAGS - -# Check supplemental functions. -# Grep to determine success. -$BIN $MAYBE_OLD_API contract -k vk $BFLAG -o - | grep "Verification Key Hash" > /dev/null -# Use jq to determine success, and also check result not empty. -OUTPUT=$($BIN $MAYBE_OLD_API proof_as_fields -p proof -k vk -o - | jq .) -[ -n "$OUTPUT" ] || exit 1 -OUTPUT=$($BIN $MAYBE_OLD_API vk_as_fields -k vk -o - | jq .) 
-[ -n "$OUTPUT" ] || exit 1 diff --git a/barretenberg/acir_tests/flows/bb_prove_bbjs_verify.sh b/barretenberg/acir_tests/flows/bb_prove_bbjs_verify.sh deleted file mode 100755 index 169b92ece3c5..000000000000 --- a/barretenberg/acir_tests/flows/bb_prove_bbjs_verify.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash - -# prove with bb.js and verify using bb classes -set -eu - -if [ "${SYS:-}" != "ultra_honk" ]; then - echo "Error: This flow only supports ultra_honk" - exit 1 -fi - -artifact_dir=$(realpath ./target) -output_dir=$artifact_dir/bbjs-bb-tmp -mkdir -p $output_dir - -# Cleanup on exit -trap "rm -rf $output_dir" EXIT - -# Generate the VK using BB CLI -$BIN write_vk \ - --scheme ultra_honk \ - -b $artifact_dir/program.json \ - -o $output_dir - -# Generate the proof using BB CLI (save as both bytes and fields) -$BIN prove \ - --scheme ultra_honk \ - -b $artifact_dir/program.json \ - -w $artifact_dir/witness.gz \ - -k $output_dir/vk \ - --output_format bytes_and_fields \ - -o $output_dir - -# Verify the proof with bb.js classes -node ../../bbjs-test verify \ - -d $output_dir diff --git a/barretenberg/acir_tests/flows/bbjs_prove_bb_verify.sh b/barretenberg/acir_tests/flows/bbjs_prove_bb_verify.sh deleted file mode 100755 index 241400698779..000000000000 --- a/barretenberg/acir_tests/flows/bbjs_prove_bb_verify.sh +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/bash - -# prove with bb.js and verify using bb cli -set -eu - -if [ "${SYS:-}" != "ultra_honk" ]; then - echo "Error: This flow only supports ultra_honk" - exit 1 -fi - -artifact_dir=$(realpath ./target) -output_dir=$artifact_dir/bb-bbjs-tmp -mkdir -p $output_dir - -# Cleanup on exit -trap "rm -rf $output_dir" EXIT - -# Writes the proof, public inputs ./target; this also writes the VK -node ../../bbjs-test prove \ - -b $artifact_dir/program.json \ - -w $artifact_dir/witness.gz \ - -o $output_dir - -proof_bytes=$(cat $output_dir/proof | xxd -p) -public_inputs=$(cat $output_dir/public_inputs_fields.json | jq -r '.[]') - 
-public_inputs_bytes="" -for input in $public_inputs; do - public_inputs_bytes+=$input -done - -# Combine proof header and the proof to a single file -echo -n $proof_bytes | xxd -r -p > $output_dir/proof -echo -n $public_inputs_bytes | xxd -r -p > $output_dir/public_inputs -echo "$BIN verify \ - --scheme ultra_honk \ - -k $output_dir/vk \ - -p $output_dir/proof \ - -i $output_dir/public_inputs" - -# Verify the proof with bb cli -$BIN verify \ - --scheme ultra_honk \ - -k $output_dir/vk \ - -p $output_dir/proof \ - -i $output_dir/public_inputs diff --git a/barretenberg/acir_tests/flows/bbjs_prove_sol_verify.sh b/barretenberg/acir_tests/flows/bbjs_prove_sol_verify.sh deleted file mode 100755 index a23bf447e391..000000000000 --- a/barretenberg/acir_tests/flows/bbjs_prove_sol_verify.sh +++ /dev/null @@ -1,41 +0,0 @@ -#!/bin/bash - -# prove using bb.js and verify using solidity verifier -set -eu - -if [ "${SYS:-}" != "ultra_honk" ]; then - echo "Error: This flow only supports ultra_honk" - exit 1 -fi - -artifact_dir=$(realpath ./target) -output_dir=$artifact_dir/bbjs-sol-tmp -mkdir -p $output_dir - -# Cleanup on exit -trap "rm -rf $output_dir" EXIT - -# Generate the proof and VK -node ../../bbjs-test prove \ - -b $artifact_dir/program.json \ - -w $artifact_dir/witness.gz \ - -o $output_dir \ - --oracle-hash keccakZK - -# Write the solidity verifier to ./target -export VK=$output_dir/vk -export VERIFIER_PATH="$output_dir/Verifier.sol" - -# Use the BB CLI to write the solidity verifier - this can also be done with bb.js -$BIN write_solidity_verifier --scheme ultra_honk -k $VK -o $VERIFIER_PATH - -# Verify the proof using the solidity verifier -export PROOF=$output_dir/proof -export HAS_ZK="true" -export PROOF_AS_FIELDS=$output_dir/proof_fields.json -export PUBLIC_INPUTS_AS_FIELDS=$output_dir/public_inputs_fields.json -export TEST_PATH=$(realpath "../../sol-test/HonkTest.sol") -export TESTING_HONK="true" -export TEST_NAME=$(basename $(realpath ./)) - -node 
../../sol-test/src/index.js diff --git a/barretenberg/acir_tests/flows/bbjs_prove_verify.sh b/barretenberg/acir_tests/flows/bbjs_prove_verify.sh deleted file mode 100755 index 3eec6dadf8e4..000000000000 --- a/barretenberg/acir_tests/flows/bbjs_prove_verify.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash - -# prove and verify using bb.js classes -set -eu - -if [ "${SYS:-}" != "ultra_honk" ]; then - echo "Error: This flow only supports ultra_honk" - exit 1 -fi - -artifact_dir=$(realpath ./target) -output_dir=$artifact_dir/bbjs-tmp -mkdir -p $output_dir - -# Cleanup on exit -trap "rm -rf $output_dir" EXIT - -# Writes the proof, public inputs ./target; this also writes the VK -node ../../bbjs-test prove \ - -b $artifact_dir/program.json \ - -w $artifact_dir/witness.gz \ - -o $output_dir \ - --multi-threaded - -# Verify the proof by reading the files in ./target -node ../../bbjs-test verify \ - -d $output_dir diff --git a/barretenberg/acir_tests/flows/fail.sh b/barretenberg/acir_tests/flows/fail.sh deleted file mode 100755 index c3adb8059ab7..000000000000 --- a/barretenberg/acir_tests/flows/fail.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/usr/bin/env bash -# Fails, for testing the harness. -echo Forced failure. -false diff --git a/barretenberg/acir_tests/flows/prove_and_verify.sh b/barretenberg/acir_tests/flows/prove_and_verify.sh deleted file mode 100755 index 8fc1ea99a6dc..000000000000 --- a/barretenberg/acir_tests/flows/prove_and_verify.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env bash -# prove_and_verify produces no output, so is parallel safe. 
-set -eu - -flags="-c $CRS_PATH ${VERBOSE:+-v}" -[ "${RECURSIVE}" = "true" ] && flags+=" --recursive" - -case ${SYS:-} in - "") - cmd=prove_and_verify - ;; - *) - cmd=prove_and_verify_$SYS - ;; -esac - -$BIN $cmd $flags -b ./target/program.json diff --git a/barretenberg/acir_tests/flows/prove_and_verify_program.sh b/barretenberg/acir_tests/flows/prove_and_verify_program.sh deleted file mode 100755 index b9963875f105..000000000000 --- a/barretenberg/acir_tests/flows/prove_and_verify_program.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/sh -# prove_and_verify produces no output, so is parallel safe. -set -eu - -VFLAG=${VERBOSE:+-v} -FLAGS="-c $CRS_PATH $VFLAG" -[ "${RECURSIVE}" = "true" ] && FLAGS+=" --recursive" - -$BIN prove_and_verify_${SYS}_program $FLAGS -b ./target/program.json diff --git a/barretenberg/acir_tests/flows/prove_then_verify.sh b/barretenberg/acir_tests/flows/prove_then_verify.sh deleted file mode 100755 index 83b580053bb8..000000000000 --- a/barretenberg/acir_tests/flows/prove_then_verify.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env bash -# prove_then_verify produces intermediate state. We use process substitution to make parallel safe. -set -eu - -BFLAG="-b ./target/program.json" -FLAGS="-c $CRS_PATH ${VERBOSE:+-v}" -[ "${RECURSIVE}" = "true" ] && FLAGS+=" --recursive" - -# Test we can perform the proof/verify flow. -# This ensures we test independent pk construction through real/garbage witness data paths. -# We use process substitution pipes to avoid temporary files, which need cleanup, and can collide with parallelism. 
- -case ${SYS:-} in - "") - # Deprecated; used for old node cli - [ -n "${SYS:-}" ] && SYS="_$SYS" || SYS="" - $BIN verify$SYS $FLAGS \ - -k <($BIN write_vk$SYS -o - $FLAGS $BFLAG) \ - -p <($BIN prove$SYS -o - $FLAGS $BFLAG) - ;; - "ultra_honk") - FLAGS+=" --scheme $SYS --oracle_hash ${HASH:-poseidon2}" - [ "${ROLLUP:-false}" = "true" ] && FLAGS+=" --ipa_accumulation" - [ "${RECURSIVE}" = "true" ] && FLAGS+=" --init_kzg_accumulator" - # DISABLE_ZK controls whether the zero-knowledge property is disabled. - # the flag is by default false, and when true, --disable_zk is added to the flags. - [ "${DISABLE_ZK:-false}" = "true" ] && FLAGS+=" --disable_zk" - - OUTDIR=$(mktemp -d) - trap "rm -rf $OUTDIR" EXIT - $BIN write_vk $FLAGS $BFLAG -o $OUTDIR - $BIN prove $FLAGS $BFLAG -k $OUTDIR/vk -o $OUTDIR - $BIN verify $FLAGS \ - -k $OUTDIR/vk \ - -p $OUTDIR/proof \ - -i $OUTDIR/public_inputs - ;; - "ultra_honk_deprecated") - # TODO(https://github.com/AztecProtocol/barretenberg/issues/1434) deprecated flow is necessary until we finish C++ api refactor and then align ts api - SYS_DEP=_ultra_honk - OUTDIR=$(mktemp -d) - trap "rm -rf $OUTDIR" EXIT - $BIN write_vk$SYS_DEP $FLAGS $BFLAG -o $OUTDIR/vk - $BIN prove$SYS_DEP -o $OUTDIR/proof $FLAGS $BFLAG -k $OUTDIR/vk - $BIN verify$SYS_DEP $FLAGS \ - -k $OUTDIR/vk \ - -p $OUTDIR/proof - ;; - *) - [ -n "${SYS:-}" ] && SYS="_$SYS" || SYS="" - $BIN verify$SYS $FLAGS \ - -k <($BIN write_vk$SYS -o - $FLAGS $BFLAG) \ - -p <($BIN prove$SYS -o - $FLAGS $BFLAG) - ;; -esac diff --git a/barretenberg/acir_tests/flows/prove_then_verify_client_ivc.sh b/barretenberg/acir_tests/flows/prove_then_verify_client_ivc.sh deleted file mode 100755 index 957867df46f6..000000000000 --- a/barretenberg/acir_tests/flows/prove_then_verify_client_ivc.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env bash -# Create intermediate state in a directory. Uses a temp dir to ensure parallel safe and cleans up on exit. 
-# TODO this is unused -set -eux - -CRS_PATH=${CRS_PATH:-$HOME/.bb-crs} -BIN=$(realpath ${BIN:-../cpp/build/bin/bb}) - -[ -n "${1:-}" ] && cd ./acir_tests/$1 - -outdir=$(mktemp -d) -trap "rm -rf $outdir" EXIT - -flags="--scheme client_ivc -c $CRS_PATH ${VERBOSE:+-v}" - -parallel ::: \ - "$BIN prove $flags -i target/ivc-inputs.msgpack $INFLAG --output_format proof -o $outdir" \ - "$BIN write_vk $flags -i target/ivc-inputs.msgpack $INFLAG --verifier_type ivc -o $outdir" -$BIN verify $flags -p $outdir/proof -k $outdir/vk diff --git a/barretenberg/acir_tests/flows/prove_then_verify_tube.sh b/barretenberg/acir_tests/flows/prove_then_verify_tube.sh deleted file mode 100755 index 97a1f7a6032e..000000000000 --- a/barretenberg/acir_tests/flows/prove_then_verify_tube.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/sh -set -eux - -mkdir -p ./proofs - -CRS_PATH=${CRS_PATH:-$HOME/.bb-crs} -BIN=$(realpath ${BIN:-../cpp/build/bin/bb}) - -[ -n "${1:-}" ] && cd ./acir_tests/$1 - -outdir=$(mktemp -d) -trap "rm -rf $outdir" EXIT - -# TODO(https://github.com/AztecProtocol/barretenberg/issues/1252): deprecate in favor of normal proving flow -$BIN OLD_API write_arbitrary_valid_client_ivc_proof_and_vk_to_file -c $CRS_PATH ${VERBOSE:+-v} -o $outdir -$BIN prove_tube -c $CRS_PATH ${VERBOSE:+-v} -k $outdir/vk -o $outdir -# TODO(https://github.com/AztecProtocol/barretenberg/issues/1322): Just call verify. 
-$BIN verify_tube -c $CRS_PATH ${VERBOSE:+-v} -o $outdir diff --git a/barretenberg/acir_tests/flows/sol.sh b/barretenberg/acir_tests/flows/sol.sh deleted file mode 100755 index bcb06e9fe392..000000000000 --- a/barretenberg/acir_tests/flows/sol.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/sh -set -eu - -VFLAG=${VERBOSE:+-v} -BFLAG="-b ./target/program.json" -FLAGS="-c $CRS_PATH $VFLAG" - -export PROOF="$PWD/sol_proof" -export PROOF_AS_FIELDS="$PWD/sol_proof_fields.json" -export VK="$PWD/sol_vk" - -# Create a proof, write the solidity contract, write the proof as fields in order to extract the public inputs -$BIN OLD_API prove -o $PROOF $FLAGS -$BIN OLD_API write_vk -o $VK $FLAGS -$BIN OLD_API proof_as_fields -k $VK $FLAGS -p $PROOF -$BIN OLD_API contract -k $VK $FLAGS $BFLAG -o Key.sol - -# Export the paths to the environment variables for the js test runner -export KEY_PATH="$PWD/Key.sol" -export VERIFIER_PATH=$(realpath "../../sol-test/Verifier.sol") -export TEST_PATH=$(realpath "../../sol-test/Test.sol") -export BASE_PATH=$(realpath "../../../sol/src/ultra/BaseUltraVerifier.sol") - -# Use solcjs to compile the generated key contract with the template verifier and test contract -# index.js will start an anvil, on a random port -# Deploy the verifier then send a test transaction -export TEST_NAME=$(basename $(pwd)) -node ../../sol-test/src/index.js diff --git a/barretenberg/acir_tests/flows/sol_honk.sh b/barretenberg/acir_tests/flows/sol_honk.sh deleted file mode 100755 index 07258678a98b..000000000000 --- a/barretenberg/acir_tests/flows/sol_honk.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/sh -set -eu - -VFLAG=${VERBOSE:+-v} -BFLAG="-b ./target/program.json" -FLAGS="-c $CRS_PATH $VFLAG --scheme ultra_honk --disable_zk" -PROVE_FLAGS="$FLAGS $BFLAG --oracle_hash keccak --output_format bytes_and_fields --write_vk" -VERIFY_FLAGS="$FLAGS --oracle_hash keccak" - -outdir=$(mktemp -d) -trap "rm -rf $outdir" EXIT - -# Export the paths to the environment variables for the js test 
runner -export PUBLIC_INPUTS="$outdir/public_inputs" -export PUBLIC_INPUTS_AS_FIELDS="$outdir/public_inputs_fields.json" -export PROOF="$outdir/proof" -export PROOF_AS_FIELDS="$outdir/proof_fields.json" -export VK="$outdir/vk" -export VERIFIER_CONTRACT="$outdir/Verifier.sol" - -# Create a proof, write the solidity contract, write the proof as fields in order to extract the public inputs -$BIN prove $PROVE_FLAGS -o $outdir -$BIN verify $VERIFY_FLAGS -i $PUBLIC_INPUTS -k $VK -p $PROOF -$BIN write_solidity_verifier $FLAGS -k $VK -o $VERIFIER_CONTRACT - -# Export the paths to the environment variables for the js test runner -export VERIFIER_PATH="$outdir/Verifier.sol" -export TEST_PATH=$(realpath "../../sol-test/HonkTest.sol") -export TESTING_HONK="true" - -# Use solcjs to compile the generated key contract with the template verifier and test contract -# index.js will start an anvil, on a random port -# Deploy the verifier then send a test transaction -export TEST_NAME=$(basename $outdir) -node ../../sol-test/src/index.js diff --git a/barretenberg/acir_tests/flows/sol_honk_zk.sh b/barretenberg/acir_tests/flows/sol_honk_zk.sh deleted file mode 100755 index 93c93c1be692..000000000000 --- a/barretenberg/acir_tests/flows/sol_honk_zk.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/sh -set -eux - -VFLAG=${VERBOSE:+-v} -BFLAG="-b ./target/program.json" -FLAGS="-c $CRS_PATH $VFLAG --scheme ultra_honk" -PROTOCOL_FLAGS=" --honk_recursion 1 --oracle_hash keccak" - -outdir=$(mktemp -d) -trap "rm -rf $outdir" EXIT - -# Export the paths to the environment variables for the js test runner -export PUBLIC_INPUTS="$outdir/public_inputs" -export PUBLIC_INPUTS_AS_FIELDS="$outdir/public_inputs_fields.json" -export PROOF="$outdir/proof" -export PROOF_AS_FIELDS="$outdir/proof_fields.json" -export VK="$outdir/vk" -export VERIFIER_CONTRACT="$outdir/Verifier.sol" - -# Create a proof, write the solidity contract, write the proof as fields in order to extract the public inputs -$BIN prove -o $outdir 
$FLAGS $BFLAG $PROTOCOL_FLAGS --output_format bytes_and_fields --write_vk -$BIN verify -i $PUBLIC_INPUTS -k $VK -p $PROOF $FLAGS $PROTOCOL_FLAGS -$BIN write_solidity_verifier $FLAGS -k $VK -o $VERIFIER_CONTRACT - -# Export the paths to the environment variables for the js test runner -export VERIFIER_PATH="$outdir/Verifier.sol" -export TEST_PATH=$(realpath "../../sol-test/HonkTest.sol") -export TESTING_HONK="true" -export HAS_ZK="true" - -# Use solcjs to compile the generated key contract with the template verifier and test contract -# index.js will start an anvil, on a random port -# Deploy the verifier then send a test transaction -export TEST_NAME=$(basename $outdir) -node ../../sol-test/src/index.js diff --git a/barretenberg/acir_tests/scripts/bb_prove.sh b/barretenberg/acir_tests/scripts/bb_prove.sh new file mode 100755 index 000000000000..0e6f7920bbdb --- /dev/null +++ b/barretenberg/acir_tests/scripts/bb_prove.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash +source $(git rev-parse --show-toplevel)/ci3/source + +export HARDWARE_CONCURRENCY=8 + +cd ../acir_tests/$1 + +bb=$(../../../cpp/scripts/find-bb) + +shift +# Base flags + our commandline args +flags="-v --scheme ultra_honk $*" + +mkdir -p output-$$ +trap "rm -rf output-$$" EXIT + +# Generate VK +$bb write_vk $flags -b target/program.json -o output-$$ + +# Prove +$bb prove $flags -b target/program.json -k output-$$/vk -o output-$$ + +# Verify +$bb verify $flags \ + -k output-$$/vk \ + -p output-$$/proof \ + -i output-$$/public_inputs diff --git a/barretenberg/acir_tests/scripts/bb_prove_bbjs_verify.sh b/barretenberg/acir_tests/scripts/bb_prove_bbjs_verify.sh new file mode 100755 index 000000000000..c29b45c212cf --- /dev/null +++ b/barretenberg/acir_tests/scripts/bb_prove_bbjs_verify.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash +source $(git rev-parse --show-toplevel)/ci3/source + +cd ../acir_tests/$1 + +export HARDWARE_CONCURRENCY=8 + +bb=$(../../../cpp/scripts/find-bb) + +mkdir -p output-$$ +trap "rm -rf output-$$" 
EXIT + +# Generate the VK using BB CLI +$bb write_vk \ + --scheme ultra_honk \ + -b target/program.json \ + -o output-$$ + +# Generate the proof using BB CLI (save as both bytes and fields) +$bb prove \ + --scheme ultra_honk \ + -b target/program.json \ + -w target/witness.gz \ + -k output-$$/vk \ + --output_format bytes_and_fields \ + -o output-$$ + +# Verify the proof with bb.js classes +node ../../bbjs-test verify \ + -d output-$$ diff --git a/barretenberg/acir_tests/scripts/bb_prove_sol_verify.sh b/barretenberg/acir_tests/scripts/bb_prove_sol_verify.sh new file mode 100755 index 000000000000..e17328df1743 --- /dev/null +++ b/barretenberg/acir_tests/scripts/bb_prove_sol_verify.sh @@ -0,0 +1,44 @@ +#!/usr/bin/env bash +source $(git rev-parse --show-toplevel)/ci3/source + +cd ../acir_tests/$1 + +export HARDWARE_CONCURRENCY=8 + +bb=$(../../../cpp/scripts/find-bb) + +# Build base flags +flags="-v --scheme ultra_honk" + +# Add any additional arguments passed from command line +shift +for arg in "$@"; do + flags+=" $arg" +done + +# Check if --disable_zk is in the flags to determine HAS_ZK +if [[ "$flags" == *"--disable_zk"* ]]; then + has_zk="false" +else + has_zk="true" +fi + +mkdir -p output-$$ +trap "rm -rf output-$$" EXIT + +# Create a proof, write the solidity contract, write the proof as fields in order to extract the public inputs +$bb prove $flags -b target/program.json --oracle_hash keccak --output_format bytes_and_fields --write_vk -o output-$$ +$bb verify $flags --oracle_hash keccak -i output-$$/public_inputs -k output-$$/vk -p output-$$/proof +$bb write_solidity_verifier $flags -k output-$$/vk -o output-$$/Verifier.sol + +# Use solcjs to compile the generated key contract with the template verifier and test contract +# index.js will start an anvil, on a random port +# Deploy the verifier then send a test transaction +PROOF="output-$$/proof" \ +PROOF_AS_FIELDS="output-$$/proof_fields.json" \ +PUBLIC_INPUTS_AS_FIELDS="output-$$/public_inputs_fields.json" \ 
+VERIFIER_PATH="output-$$/Verifier.sol" \ +TEST_PATH="../../sol-test/HonkTest.sol" \ +HAS_ZK="$has_zk" \ +TEST_NAME=$(basename output-$$) \ + node ../../sol-test/src/index.js diff --git a/barretenberg/acir_tests/scripts/bb_tube_prove.sh b/barretenberg/acir_tests/scripts/bb_tube_prove.sh new file mode 100755 index 000000000000..80cd0dfa61fc --- /dev/null +++ b/barretenberg/acir_tests/scripts/bb_tube_prove.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +source $(git rev-parse --show-toplevel)/ci3/source + +export HARDWARE_CONCURRENCY=8 + +cd ../acir_tests/$1 + +mkdir -p output-$$ +trap "rm -rf output-$$" EXIT + +bb=$(../../../cpp/scripts/find-bb) +# TODO(https://github.com/AztecProtocol/barretenberg/issues/1252): deprecate in favor of normal proving flow +$bb OLD_API write_arbitrary_valid_client_ivc_proof_and_vk_to_file -v -o output-$$ +$bb prove_tube -v -k output-$$/vk -o output-$$ +# TODO(https://github.com/AztecProtocol/barretenberg/issues/1322): Just call verify. +$bb verify_tube -v -o output-$$ diff --git a/barretenberg/acir_tests/scripts/bbjs_legacy_cli_prove.sh b/barretenberg/acir_tests/scripts/bbjs_legacy_cli_prove.sh new file mode 100755 index 000000000000..e9fa668f75ac --- /dev/null +++ b/barretenberg/acir_tests/scripts/bbjs_legacy_cli_prove.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +source $(git rev-parse --show-toplevel)/ci3/source + +export HARDWARE_CONCURRENCY=8 + +cd ../acir_tests/$1 + +# NOTE The bb.js main file is deprecated! 
+bbjs_bin="../../../ts/dest/node/main.js" + +mkdir -p output-$$ +trap "rm -rf output-$$" EXIT + +# Generate VK +node $bbjs_bin write_vk_ultra_honk -v -b target/program.json -o output-$$/vk + +# Prove +node $bbjs_bin prove_ultra_honk -o output-$$/proof -v -b target/program.json -k output-$$/vk + +# Verify +node $bbjs_bin verify_ultra_honk -v \ + -k output-$$/vk \ + -p output-$$/proof diff --git a/barretenberg/acir_tests/scripts/bbjs_prove.sh b/barretenberg/acir_tests/scripts/bbjs_prove.sh new file mode 100755 index 000000000000..d67f4330909d --- /dev/null +++ b/barretenberg/acir_tests/scripts/bbjs_prove.sh @@ -0,0 +1,20 @@ +#!/usr/bin/env bash +source $(git rev-parse --show-toplevel)/ci3/source + +cd ../acir_tests/$1 + +export HARDWARE_CONCURRENCY=8 + +mkdir -p output-$$ +trap "rm -rf output-$$" EXIT + +# Writes the proof, public inputs ./target; this also writes the VK +node ../../bbjs-test prove \ + -b target/program.json \ + -w target/witness.gz \ + -o output-$$ \ + --multi-threaded + +# Verify the proof by reading the files in ./target +node ../../bbjs-test verify \ + -d output-$$ diff --git a/barretenberg/acir_tests/scripts/bbjs_prove_bb_verify.sh b/barretenberg/acir_tests/scripts/bbjs_prove_bb_verify.sh new file mode 100755 index 000000000000..f5cdb196f6e3 --- /dev/null +++ b/barretenberg/acir_tests/scripts/bbjs_prove_bb_verify.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +source $(git rev-parse --show-toplevel)/ci3/source + +export HARDWARE_CONCURRENCY=8 + +cd ../acir_tests/$1 + +mkdir -p output-$$ +trap "rm -rf output-$$" EXIT + +# Writes the proof, public inputs ./target; this also writes the VK +node ../../bbjs-test prove \ + -b target/program.json \ + -w target/witness.gz \ + -o output-$$ + +proof_bytes=$(cat output-$$/proof | xxd -p) +public_inputs=$(cat output-$$/public_inputs_fields.json | jq -r '.[]') + +public_inputs_bytes="" +for input in $public_inputs; do + public_inputs_bytes+=$input +done + +# Combine proof header and the proof to a single file 
+echo -n $proof_bytes | xxd -r -p > output-$$/proof +echo -n $public_inputs_bytes | xxd -r -p > output-$$/public_inputs + +bb=$(../../../cpp/scripts/find-bb) +# Verify the proof with bb cli +$bb verify \ + --scheme ultra_honk \ + -k output-$$/vk \ + -p output-$$/proof \ + -i output-$$/public_inputs diff --git a/barretenberg/acir_tests/scripts/bbjs_prove_sol_verify.sh b/barretenberg/acir_tests/scripts/bbjs_prove_sol_verify.sh new file mode 100755 index 000000000000..180a5ab4518a --- /dev/null +++ b/barretenberg/acir_tests/scripts/bbjs_prove_sol_verify.sh @@ -0,0 +1,44 @@ +#!/usr/bin/env bash +source $(git rev-parse --show-toplevel)/ci3/source + +export HARDWARE_CONCURRENCY=8 + +cd ../acir_tests/$1 + +mkdir -p output-$$ +trap "rm -rf output-$$" EXIT + +bb=$(../../../cpp/scripts/find-bb) + +# Default to keccakZK for solidity compatibility +oracle_hash="keccakZK" +has_zk="true" + +# Process additional arguments +shift +for arg in "$@"; do + if [[ "$arg" == "--disable_zk" ]]; then + has_zk="false" + oracle_hash="keccak" + fi +done + +# Generate the proof and VK +node ../../bbjs-test prove \ + -b target/program.json \ + -w target/witness.gz \ + -o output-$$ \ + --oracle-hash $oracle_hash + +# Use the BB CLI to write the solidity verifier - this can also be done with bb.js +$bb write_solidity_verifier --scheme ultra_honk -k output-$$/vk -o output-$$/Verifier.sol + +# Verify the proof using the solidity verifier +PROOF="output-$$/proof" \ +PROOF_AS_FIELDS="output-$$/proof_fields.json" \ +PUBLIC_INPUTS_AS_FIELDS="output-$$/public_inputs_fields.json" \ +VERIFIER_PATH="output-$$/Verifier.sol" \ +TEST_PATH="../../sol-test/HonkTest.sol" \ +HAS_ZK="$has_zk" \ +TEST_NAME=$(basename $(realpath .)) \ + node ../../sol-test/src/index.js diff --git a/barretenberg/acir_tests/scripts/bench_acir_tests.sh b/barretenberg/acir_tests/scripts/bench_acir_tests.sh deleted file mode 100755 index 70d94d34497f..000000000000 --- a/barretenberg/acir_tests/scripts/bench_acir_tests.sh +++ /dev/null @@ 
-1,76 +0,0 @@ -#!/usr/bin/env bash -set -e - -cd "$(dirname "$0")" - -TEST_NAMES=("$@") -THREADS=(1 4 16 32 64) -BENCHMARKS=$LOG_FILE - -if [[ -z "${LOG_FILE}" ]]; then - BENCHMARKS=$(mktemp) -fi - -if [ "${#TEST_NAMES[@]}" -eq 0 ]; then - TEST_NAMES=$(find acir_tests/bench_* -maxdepth 0 -type d -printf '%f ') -fi - -for TEST in ${TEST_NAMES[@]}; do - for HC in ${THREADS[@]}; do - HARDWARE_CONCURRENCY=$HC BENCHMARK_FD=3 ./run_acir_tests.sh $TEST 3>>$BENCHMARKS - done -done - -# Build results into string with \n delimited rows and space delimited values. -TABLE_DATA="" -for TEST in ${TEST_NAMES[@]}; do - GATE_COUNT=$(jq -r --arg test "$TEST" 'select(.eventName == "gate_count" and .acir_test == $test) | .value' $BENCHMARKS | uniq) - SUBGROUP_SIZE=$(jq -r --arg test "$TEST" 'select(.eventName == "subgroup_size" and .acir_test == $test) | .value' $BENCHMARKS | uniq) - # Name in col 1, gate count in col 2, subgroup size in col 3. - TABLE_DATA+="$TEST $GATE_COUNT $SUBGROUP_SIZE" - # Each thread timing in subsequent cols. - for HC in "${THREADS[@]}"; do - RESULT=$(cat $BENCHMARKS | jq -r --arg test "$TEST" --argjson hc $HC 'select(.eventName == "proof_construction_time" and .acir_test == $test and .threads == $hc) | .value') - TABLE_DATA+=" $RESULT" - done - TABLE_DATA+=$'\n' -done - -# Trim the trailing newline. -TABLE_DATA="${TABLE_DATA%$'\n'}" - -echo -echo Table represents time in ms to build circuit and proof for each test on n threads. -echo Ignores proving key construction. 
-echo -# Use awk to print the table -echo -e "$TABLE_DATA" | awk -v threads="${THREADS[*]}" 'BEGIN { - split(threads, t, " "); - len_threads = length(t); - print "+--------------------------+------------+---------------+" genseparator(len_threads); - print "| Test | Gate Count | Subgroup Size |" genthreadheaders(t, len_threads); - print "+--------------------------+------------+---------------+" genseparator(len_threads); -} -{ - printf("| %-24s | %-10s | %-13s |", $1, $2, $3); - for (i = 4; i <= len_threads+3; i++) { - printf " %9s |", $(i); - } - print ""; -} -END { - print "+--------------------------+------------+---------------+" genseparator(len_threads); -} -function genseparator(len, res) { - for (i = 1; i <= len; i++) res = res "-----------+"; - return res; -} -function genthreadheaders(t, len, res) { - for (i = 1; i <= len; i++) res = res sprintf(" %9s |", t[i]); - return res; -} -' - -if [[ -z "${LOG_FILE}" ]]; then - rm $BENCHMARKS -fi diff --git a/barretenberg/acir_tests/scripts/browser_prove.sh b/barretenberg/acir_tests/scripts/browser_prove.sh new file mode 100755 index 000000000000..0608e6530554 --- /dev/null +++ b/barretenberg/acir_tests/scripts/browser_prove.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash +source $(git rev-parse --show-toplevel)/ci3/source +cd ../acir_tests/$1 +export BROWSER=$2 + +# Launch browser server +dump_fail \ + "cd ../../browser-test-app && ../node_modules/.bin/serve -n -L -p 8080 -c ../serve.json dest" > /dev/null & +while ! 
nc -z localhost 8080 &>/dev/null; do sleep 1; done; + +# Use the browser binary for the test +../../headless-test/bb.js.browser prove_and_verify -b target/program.json -v diff --git a/barretenberg/acir_tests/scripts/run_bench.sh b/barretenberg/acir_tests/scripts/run_bench.sh index c63a79875f75..9674ff4115c4 100755 --- a/barretenberg/acir_tests/scripts/run_bench.sh +++ b/barretenberg/acir_tests/scripts/run_bench.sh @@ -14,6 +14,7 @@ mkdir -p ./bench-out bash -c "$cmd" 2>&1 | \ tee /dev/stderr | grep "mem: " | - tail -1 | sed -e 's/.*mem: \([0-9.]\+\).*/\1/' | + sort -un | + tail -n 1 | jq -n --arg name $name '[{name: $name, value: input, unit: "MiB"}]' > ./bench-out/$name.bench.json diff --git a/barretenberg/acir_tests/scripts/run_test.sh b/barretenberg/acir_tests/scripts/run_test.sh deleted file mode 100755 index 721456345094..000000000000 --- a/barretenberg/acir_tests/scripts/run_test.sh +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env bash -source $(git rev-parse --show-toplevel)/ci3/source - -cd .. - -TEST_NAME=$1 - -COMPILE=${COMPILE:-0} -native_build_dir=$(../cpp/scripts/native-preset-build-dir) -export BIN=$(realpath ${BIN:-../cpp/$native_build_dir/bin/bb}) -export CRS_PATH=${CRS_PATH:-$HOME/.bb-crs} -FLOW=${FLOW:-prove_and_verify} -export RECURSIVE=${RECURSIVE:-false} -export HARDWARE_CONCURRENCY=${HARDWARE_CONCURRENCY:-8} -RAYON_NUM_THREADS=${RAYON_NUM_THREADS:-8} -export VERBOSE=${VERBOSE:-0} - -flow_script=$(realpath ./flows/${FLOW}.sh) -nargo=$(realpath ../../noir/noir-repo/target/release/nargo) - - -cd ./acir_tests/$TEST_NAME - -if [ "$COMPILE" -ne 0 ]; then - echo -n "$TEST_NAME compiling... " - export RAYON_NUM_THREADS=${RAYON_NUM_THREADS:-4} - rm -rf target - set +e - compile_output=$($nargo compile --silence-warnings 2>&1 && $nargo execute 2>&1) - result=$? - set -e - if [ "$result" -ne 0 ]; then - echo "failed." 
- echo "$compile_output" - exit $result - fi - mv ./target/$TEST_NAME.json ./target/program.json - mv ./target/$TEST_NAME.gz ./target/witness.gz - if [ "$COMPILE" -eq 2 ]; then - echo "done." - exit 0 - fi -fi - -if [[ ( ! -f ./target/program.json && ! -f ./target/acir.msgpack ) || \ - ( ! -f ./target/witness.gz && ! -f ./target/witness.msgpack ) ]]; then - echo -e "\033[33mSKIPPED\033[0m (uncompiled)" - exit 0; -fi - -set +e -SECONDS=0 -if [ "$VERBOSE" -eq 1 ]; then - output=$($flow_script 2>&1 | tee /dev/stderr) -else - output=$($flow_script 2>&1) -fi -result=$? -duration=$SECONDS -set -e - -if [ $result -eq 0 ]; then - echo -e "${green}PASSED${reset} (${duration}s)" -else - [ "$VERBOSE" -eq 0 ] && echo "$output" - echo -e "${red}FAILED${reset} (${duration}s)" - exit 1 -fi diff --git a/barretenberg/acir_tests/scripts/run_test_browser.sh b/barretenberg/acir_tests/scripts/run_test_browser.sh deleted file mode 100755 index 6ccf6335886d..000000000000 --- a/barretenberg/acir_tests/scripts/run_test_browser.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env bash -source $(git rev-parse --show-toplevel)/ci3/source - -test=$1 - -cd .. - -../../ci3/dump_fail \ - "cd browser-test-app && ../node_modules/.bin/serve -n -L -p 8080 -c ../serve.json dest" > /dev/null & -while ! 
nc -z localhost 8080 &>/dev/null; do sleep 1; done; -BIN=./headless-test/bb.js.browser ./scripts/run_test.sh $test diff --git a/barretenberg/acir_tests/sol-test/src/index.js b/barretenberg/acir_tests/sol-test/src/index.js index 3d5e0061ce82..338797f26d09 100644 --- a/barretenberg/acir_tests/sol-test/src/index.js +++ b/barretenberg/acir_tests/sol-test/src/index.js @@ -5,8 +5,6 @@ import { ethers } from "ethers"; import solc from "solc"; // Size excluding number of public inputs -const NUMBER_OF_FIELDS_IN_PLONK_PROOF = 93; - const NUMBER_OF_ELEMENTS_IN_HONK_PROOF = 457; const NUMBER_OF_ELEMENTS_IN_HONK_ZK_PROOF = 508; @@ -57,8 +55,6 @@ const [test, verifier] = await Promise.all([ fsPromises.readFile(verifierPath, encoding), ]); -// If testing honk is set, then we compile the honk test suite -const testingHonk = getEnvVarCanBeUndefined("TESTING_HONK"); const hasZK = getEnvVarCanBeUndefined("HAS_ZK"); export const compilationInput = { @@ -89,26 +85,9 @@ export const compilationInput = { }, }; -const NUMBER_OF_FIELDS_IN_PROOF = testingHonk - ? hasZK - ? NUMBER_OF_ELEMENTS_IN_HONK_ZK_PROOF - : NUMBER_OF_ELEMENTS_IN_HONK_PROOF - : NUMBER_OF_FIELDS_IN_PLONK_PROOF; -if (!testingHonk) { - const keyPath = getEnvVar("KEY_PATH"); - const basePath = getEnvVar("BASE_PATH"); - const [key, base] = await Promise.all([ - fsPromises.readFile(keyPath, encoding), - fsPromises.readFile(basePath, encoding), - ]); - - compilationInput.sources["BaseUltraVerifier.sol"] = { - content: base, - }; - compilationInput.sources["Key.sol"] = { - content: key, - }; -} +const NUMBER_OF_FIELDS_IN_PROOF = hasZK + ? 
NUMBER_OF_ELEMENTS_IN_HONK_ZK_PROOF + : NUMBER_OF_ELEMENTS_IN_HONK_PROOF; var output = JSON.parse(solc.compile(JSON.stringify(compilationInput))); @@ -282,7 +261,7 @@ try { let finalBytecode = bytecode; // Deploy ZKTranscript library if needed and link it - if (testingHonk && hasZK) { + if (hasZK) { // Check if there's a library placeholder in the bytecode const libraryPlaceholder = /__\$[a-fA-F0-9]{34}\$__/; if (libraryPlaceholder.test(bytecode)) { @@ -310,26 +289,22 @@ if (!result) throw new Error("Test failed"); } catch (e) { console.error(testName, "failed"); - if (testingHonk) { - var errorType = e.data; - switch (errorType) { - case WRONG_PROOF_LENGTH: - throw new Error( - "Proof length wrong. Possibile culprits: the NUMBER_OF_FIELDS_IN_* constants; number of public inputs; proof surgery; zk/non-zk discrepancy." - ); - case WRONG_PUBLIC_INPUTS_LENGTH: - throw new Error("Number of inputs in the proof is wrong"); - case SUMCHECK_FAILED: - throw new Error("Sumcheck round failed"); - case SHPLEMINI_FAILED: - throw new Error("PCS round failed"); - case CONSISTENCY_FAILED: - throw new Error("ZK contract: Subgroup IPA consistency check error"); - case GEMINI_CHALLENGE_IN_SUBGROUP: - throw new Error("ZK contract: Gemini challenge error"); - default: - throw e; - } + var errorType = e.data; + switch (errorType) { + case WRONG_PROOF_LENGTH: + throw new Error( + "Proof length wrong. Possible culprits: the NUMBER_OF_FIELDS_IN_* constants; number of public inputs; proof surgery; zk/non-zk discrepancy." 
+ ); + case WRONG_PUBLIC_INPUTS_LENGTH: + throw new Error("Number of inputs in the proof is wrong"); + case SUMCHECK_FAILED: + throw new Error("Sumcheck round failed"); + case SHPLEMINI_FAILED: + throw new Error("PCS round failed"); + case CONSISTENCY_FAILED: + throw new Error("ZK contract: Subgroup IPA consistency check error"); + case GEMINI_CHALLENGE_IN_SUBGROUP: + throw new Error("ZK contract: Gemini challenge error"); } throw e; } finally { diff --git a/barretenberg/cpp/scripts/find-bb b/barretenberg/cpp/scripts/find-bb new file mode 100755 index 000000000000..2f66aff9db36 --- /dev/null +++ b/barretenberg/cpp/scripts/find-bb @@ -0,0 +1,4 @@ +#!/usr/bin/env bash +source $(git rev-parse --show-toplevel)/ci3/source + +echo $root/barretenberg/cpp/$(./native-preset-build-dir)/bin/bb diff --git a/barretenberg/cpp/src/barretenberg/api/api.hpp b/barretenberg/cpp/src/barretenberg/api/api.hpp index 02bba6e9f349..c11ca54950c7 100644 --- a/barretenberg/cpp/src/barretenberg/api/api.hpp +++ b/barretenberg/cpp/src/barretenberg/api/api.hpp @@ -14,8 +14,6 @@ class API { // zero knowledge variant of the protocol by default std::filesystem::path crs_path{ "" }; // the location of reference strings for commitment schemes bool recursive{ false }; // deprecated flag indicating that a circuit is to be recursively verified - bool init_kzg_accumulator{ false }; // stripped down version fo `recursive` in the UltraHonk; also deprecated? - uint32_t honk_recursion{ 0 }; // flag that differentiates between different recursion modes; deprecated? bool ipa_accumulation{ false }; // indicate whether the command is doing IPA proof aggregation std::string scheme; // the proving system or IVC scheme std::string oracle_hash_type; // which hash function does the prover use as a random oracle? 
@@ -34,9 +32,6 @@ class API { << " debug: " << flags.debug << "\n" << " disable_zk: " << flags.disable_zk << "\n" << " crs_path: " << flags.crs_path << "\n" - << " recursive: " << flags.recursive << "\n" - << " init_kzg_accumulator: " << flags.init_kzg_accumulator << "\n" - << " honk_recursion: " << flags.honk_recursion << "\n" << " ipa_accumulation: " << flags.ipa_accumulation << "\n" << " scheme: " << flags.scheme << "\n" << " oracle_hash_type: " << flags.oracle_hash_type << "\n" diff --git a/barretenberg/cpp/src/barretenberg/api/api_ultra_honk.cpp b/barretenberg/cpp/src/barretenberg/api/api_ultra_honk.cpp index bf13a0f505d2..f77ff9f16b61 100644 --- a/barretenberg/cpp/src/barretenberg/api/api_ultra_honk.cpp +++ b/barretenberg/cpp/src/barretenberg/api/api_ultra_honk.cpp @@ -253,59 +253,4 @@ void UltraHonkAPI::write_solidity_verifier(const Flags& flags, } } } - -template -void write_recursion_inputs_ultra_honk(const std::string& bytecode_path, - const std::string& witness_path, - const std::string& output_path) -{ - // Read input files directly as bytes - auto bytecode = get_bytecode(bytecode_path); - auto witness = get_bytecode(witness_path); - - // Determine settings based on flavor - bbapi::ProofSystemSettings settings; - if constexpr (IsAnyOf) { - settings.ipa_accumulation = true; - } - - // Get VK first (needed for proving) - auto vk_response = - bbapi::CircuitComputeVk{ .circuit = { .name = "circuit", .bytecode = bytecode }, .settings = settings } - .execute(); - - // Execute prove with the VK - auto prove_response = bbapi::CircuitProve{ .circuit = { .name = "circuit", - .bytecode = std::move(bytecode), - .verification_key = std::move(vk_response.bytes) }, - .witness = std::move(witness), - .settings = settings } - .execute(); - - // Reconstruct full proof with public inputs - std::vector proof; - proof.reserve(prove_response.public_inputs.size() + prove_response.proof.size()); - proof.insert(proof.end(), prove_response.public_inputs.begin(), 
prove_response.public_inputs.end()); - proof.insert(proof.end(), prove_response.proof.begin(), prove_response.proof.end()); - - // Deserialize VK for ProofSurgeon - auto verification_key = std::make_shared( - from_buffer(vk_response.bytes)); - - // Generate TOML content - const std::string toml_content = acir_format::ProofSurgeon::construct_recursion_inputs_toml_data( - proof, verification_key, settings.ipa_accumulation); - - // Write to file - const std::string toml_path = output_path + "/Prover.toml"; - write_file(toml_path, { toml_content.begin(), toml_content.end() }); -} - -template void write_recursion_inputs_ultra_honk(const std::string& bytecode_path, - const std::string& witness_path, - const std::string& output_path); - -template void write_recursion_inputs_ultra_honk(const std::string& bytecode_path, - const std::string& witness_path, - const std::string& output_path); } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/api/gate_count.hpp b/barretenberg/cpp/src/barretenberg/api/gate_count.hpp deleted file mode 100644 index b2e7fee03d9a..000000000000 --- a/barretenberg/cpp/src/barretenberg/api/gate_count.hpp +++ /dev/null @@ -1,66 +0,0 @@ -#pragma once -#include "barretenberg/api/acir_format_getters.hpp" -#include "barretenberg/constants.hpp" - -namespace bb { -/** - * @brief Computes the number of Barretenberg specific gates needed to create a proof for the specific ACIR circuit. - * - * Communication: - * - stdout: A JSON string of the number of ACIR opcodes and final backend circuit size. 
- * TODO(https://github.com/AztecProtocol/barretenberg/issues/1126): split this into separate Plonk and Honk functions as - * their gate count differs - * - * @param bytecode_path Path to the file containing the serialized circuit - */ -template -void gate_count(const std::string& bytecode_path, - bool recursive, - uint32_t honk_recursion, - bool include_gates_per_opcode) -{ - // All circuit reports will be built into the string below - std::string functions_string = "{\"functions\": [\n "; - auto constraint_systems = get_constraint_systems(bytecode_path); - - const acir_format::ProgramMetadata metadata{ .recursive = recursive, - .honk_recursion = honk_recursion, - .collect_gates_per_opcode = include_gates_per_opcode }; - size_t i = 0; - for (const auto& constraint_system : constraint_systems) { - acir_format::AcirProgram program{ constraint_system }; - auto builder = acir_format::create_circuit(program, metadata); - builder.finalize_circuit(/*ensure_nonzero=*/true); - size_t circuit_size = builder.get_finalized_total_circuit_size(); - vinfo("Calculated circuit size in gate_count: ", circuit_size); - - // Build individual circuit report - std::string gates_per_opcode_str; - for (size_t j = 0; j < program.constraints.gates_per_opcode.size(); j++) { - gates_per_opcode_str += std::to_string(program.constraints.gates_per_opcode[j]); - if (j != program.constraints.gates_per_opcode.size() - 1) { - gates_per_opcode_str += ","; - } - } - - auto result_string = format( - "{\n \"acir_opcodes\": ", - program.constraints.num_acir_opcodes, - ",\n \"circuit_size\": ", - circuit_size, - (include_gates_per_opcode ? 
format(",\n \"gates_per_opcode\": [", gates_per_opcode_str, "]") : ""), - "\n }"); - - // Attach a comma if there are more circuit reports to generate - if (i != (constraint_systems.size() - 1)) { - result_string = format(result_string, ","); - } - - functions_string = format(functions_string, result_string); - - i++; - } - std::cout << format(functions_string, "\n]}"); -} - -} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/bb/cli.cpp b/barretenberg/cpp/src/barretenberg/bb/cli.cpp index 2767af2ce578..090cc2a2391f 100644 --- a/barretenberg/cpp/src/barretenberg/bb/cli.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/cli.cpp @@ -19,7 +19,6 @@ #include "barretenberg/api/api_msgpack.hpp" #include "barretenberg/api/api_ultra_honk.hpp" #include "barretenberg/api/file_io.hpp" -#include "barretenberg/api/gate_count.hpp" #include "barretenberg/api/prove_tube.hpp" #include "barretenberg/bb/cli11_formatter.hpp" #include "barretenberg/bbapi/bbapi.hpp" @@ -156,18 +155,9 @@ int parse_and_run_cli_command(int argc, char* argv[]) * Subcommand: Adders for options that we will create for more than one subcommand ***************************************************************************************************************/ - const auto add_recursive_flag = [&](CLI::App* subcommand) { + const auto add_ipa_accumulation_flag = [&](CLI::App* subcommand) { return subcommand->add_flag( - "--recursive", flags.recursive, "Do some things relating to recursive verification and KZG..."); - }; - - const auto add_honk_recursion_option = [&](CLI::App* subcommand) { - return subcommand->add_option( - "--honk_recursion", - flags.honk_recursion, - "Instruct the prover that this circuit will be recursively verified with " - "UltraHonk (1) or with UltraRollupHonk (2). 
Ensures a pairing point accumulator " - "(and additionally an IPA claim when UltraRollupHonk) is added to the public inputs of the proof."); + "--ipa_accumulation", flags.ipa_accumulation, "Accumulate/Aggregate IPA (Inner Product Argument) claims"); }; const auto add_scheme_option = [&](CLI::App* subcommand) { @@ -219,22 +209,12 @@ int parse_and_run_cli_command(int argc, char* argv[]) return subcommand->add_flag("--write_vk", flags.write_vk, "Write the provided circuit's verification key"); }; - const auto add_ipa_accumulation_flag = [&](CLI::App* subcommand) { - return subcommand->add_flag( - "--ipa_accumulation", flags.ipa_accumulation, "Accumulate/Aggregate IPA (Inner Product Argument) claims"); - }; - const auto remove_zk_option = [&](CLI::App* subcommand) { return subcommand->add_flag("--disable_zk", flags.disable_zk, "Use a non-zk version of --scheme. This flag is set to false by default."); }; - const auto add_init_kzg_accumulator_option = [&](CLI::App* subcommand) { - return subcommand->add_flag( - "--init_kzg_accumulator", flags.init_kzg_accumulator, "Initialize pairing point accumulator."); - }; - const auto add_bytecode_path_option = [&](CLI::App* subcommand) { subcommand->add_option("--bytecode_path, -b", bytecode_path, "Path to ACIR bytecode generated by Noir.") /* ->check(CLI::ExistingFile) OR stdin indicator - */; @@ -349,7 +329,6 @@ int parse_and_run_cli_command(int argc, char* argv[]) add_scheme_option(gates); add_verbose_flag(gates); add_bytecode_path_option(gates); - add_honk_recursion_option(gates); add_include_gates_per_opcode_flag(gates); /*************************************************************************************************************** @@ -363,18 +342,14 @@ int parse_and_run_cli_command(int argc, char* argv[]) add_output_path_option(prove, output_path); add_ivc_inputs_path_options(prove); add_vk_path_option(prove); - add_verbose_flag(prove); add_debug_flag(prove); add_crs_path_option(prove); add_oracle_hash_option(prove); 
add_output_format_option(prove); add_write_vk_flag(prove); - remove_zk_option(prove); - add_init_kzg_accumulator_option(prove); add_ipa_accumulation_flag(prove); - add_recursive_flag(prove); - add_honk_recursion_option(prove); + remove_zk_option(prove); add_slow_low_memory_flag(prove); add_print_op_counts_flag(prove); add_op_counts_out_option(prove); @@ -399,11 +374,8 @@ int parse_and_run_cli_command(int argc, char* argv[]) add_debug_flag(write_vk); add_output_format_option(write_vk); add_crs_path_option(write_vk); - add_init_kzg_accumulator_option(write_vk); add_oracle_hash_option(write_vk); add_ipa_accumulation_flag(write_vk); - add_honk_recursion_option(write_vk); - add_recursive_flag(write_vk); add_verifier_type_option(write_vk)->default_val("standalone"); remove_zk_option(write_vk); @@ -423,9 +395,6 @@ int parse_and_run_cli_command(int argc, char* argv[]) add_oracle_hash_option(verify); remove_zk_option(verify); add_ipa_accumulation_flag(verify); - add_init_kzg_accumulator_option(verify); - add_honk_recursion_option(verify); - add_recursive_flag(verify); /*************************************************************************************************************** * Subcommand: write_solidity_verifier @@ -449,26 +418,6 @@ int parse_and_run_cli_command(int argc, char* argv[]) ***************************************************************************************************************/ CLI::App* OLD_API = app.add_subcommand("OLD_API", "Access some old API commands"); - /*************************************************************************************************************** - * Subcommand: OLD_API gates_for_ivc - ***************************************************************************************************************/ - CLI::App* OLD_API_gates_for_ivc = OLD_API->add_subcommand("gates_for_ivc", ""); - add_verbose_flag(OLD_API_gates_for_ivc); - add_debug_flag(OLD_API_gates_for_ivc); - add_crs_path_option(OLD_API_gates_for_ivc); - 
add_bytecode_path_option(OLD_API_gates_for_ivc); - - /*************************************************************************************************************** - * Subcommand: OLD_API gates_mega_honk - ***************************************************************************************************************/ - CLI::App* OLD_API_gates_mega_honk = OLD_API->add_subcommand("gates_mega_honk", ""); - add_verbose_flag(OLD_API_gates_mega_honk); - add_debug_flag(OLD_API_gates_mega_honk); - add_crs_path_option(OLD_API_gates_mega_honk); - add_recursive_flag(OLD_API_gates_mega_honk); - add_honk_recursion_option(OLD_API_gates_mega_honk); - add_bytecode_path_option(OLD_API_gates_mega_honk); - /*************************************************************************************************************** * Subcommand: OLD_API write_arbitrary_valid_client_ivc_proof_and_vk_to_file ***************************************************************************************************************/ @@ -480,20 +429,6 @@ int parse_and_run_cli_command(int argc, char* argv[]) std::string arbitrary_valid_proof_path{ "./proofs/proof" }; add_output_path_option(OLD_API_write_arbitrary_valid_client_ivc_proof_and_vk_to_file, arbitrary_valid_proof_path); - /*************************************************************************************************************** - * Subcommand: OLD_API write_recursion_inputs_ultra_honk - ***************************************************************************************************************/ - CLI::App* OLD_API_write_recursion_inputs_ultra_honk = - OLD_API->add_subcommand("write_recursion_inputs_ultra_honk", ""); - add_verbose_flag(OLD_API_write_recursion_inputs_ultra_honk); - add_debug_flag(OLD_API_write_recursion_inputs_ultra_honk); - add_crs_path_option(OLD_API_write_recursion_inputs_ultra_honk); - std::string recursion_inputs_output_path{ "./target" }; - add_output_path_option(OLD_API_write_recursion_inputs_ultra_honk, 
recursion_inputs_output_path); - add_ipa_accumulation_flag(OLD_API_write_recursion_inputs_ultra_honk); - add_recursive_flag(OLD_API_write_recursion_inputs_ultra_honk); - add_bytecode_path_option(OLD_API_write_recursion_inputs_ultra_honk); - /*************************************************************************************************************** * Subcommand: OLD_API gates ***************************************************************************************************************/ @@ -501,8 +436,6 @@ int parse_and_run_cli_command(int argc, char* argv[]) add_verbose_flag(OLD_API_gates); add_debug_flag(OLD_API_gates); add_crs_path_option(OLD_API_gates); - add_recursive_flag(OLD_API_gates); - add_honk_recursion_option(OLD_API_gates); add_bytecode_path_option(OLD_API_gates); /*************************************************************************************************************** @@ -515,7 +448,6 @@ int parse_and_run_cli_command(int argc, char* argv[]) add_bytecode_path_option(OLD_API_verify); add_proof_path_option(OLD_API_verify); add_vk_path_option(OLD_API_verify); - add_recursive_flag(OLD_API_verify); /*************************************************************************************************************** * Subcommand: OLD_API prove_and_verify @@ -524,7 +456,6 @@ int parse_and_run_cli_command(int argc, char* argv[]) add_verbose_flag(OLD_API_prove_and_verify); add_debug_flag(OLD_API_prove_and_verify); add_crs_path_option(OLD_API_prove_and_verify); - add_recursive_flag(OLD_API_prove_and_verify); add_bytecode_path_option(OLD_API_prove_and_verify); std::filesystem::path avm_inputs_path{ "./target/avm_inputs.bin" }; @@ -713,25 +644,10 @@ int parse_and_run_cli_command(int argc, char* argv[]) throw_or_abort("The Aztec Virtual Machine (AVM) is disabled in this environment!"); } #endif - // CLIENT IVC EXTRA COMMAND - else if (OLD_API_gates_for_ivc->parsed()) { - gate_count_for_ivc(bytecode_path, true); - } else if (OLD_API_gates_mega_honk->parsed()) { - 
gate_count(bytecode_path, flags.recursive, flags.honk_recursion, true); - } else if (OLD_API_write_arbitrary_valid_client_ivc_proof_and_vk_to_file->parsed()) { + else if (OLD_API_write_arbitrary_valid_client_ivc_proof_and_vk_to_file->parsed()) { write_arbitrary_valid_client_ivc_proof_and_vk_to_file(arbitrary_valid_proof_path); return 0; } - // ULTRA HONK EXTRA COMMANDS - else if (OLD_API_write_recursion_inputs_ultra_honk->parsed()) { - if (flags.ipa_accumulation) { - write_recursion_inputs_ultra_honk( - bytecode_path, witness_path, recursion_inputs_output_path); - } else { - write_recursion_inputs_ultra_honk( - bytecode_path, witness_path, recursion_inputs_output_path); - } - } // NEW STANDARD API // NOTE(AD): We likely won't really have a standard API if our main flavours are UH or CIVC, with CIVC so // different diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/avm2_recursion_constraint.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/avm2_recursion_constraint.cpp index 057a190da2a8..7664af43e8d6 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/avm2_recursion_constraint.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/avm2_recursion_constraint.cpp @@ -50,6 +50,8 @@ void create_dummy_vkey_and_proof(Builder& builder, const std::vector& key_fields, const std::vector& proof_fields) { + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1514): restructure this function to use functions from + // mock_verifier_inputs using Flavor = avm2::AvmFlavor; // a lambda that sets dummy commitments diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.cpp index 379aee0322b0..47a7e2b01dec 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.cpp @@ -7,6 +7,7 @@ #include 
"honk_recursion_constraint.hpp" #include "barretenberg/common/assert.hpp" #include "barretenberg/constants.hpp" +#include "barretenberg/dsl/acir_format/mock_verifier_inputs.hpp" #include "barretenberg/flavor/flavor.hpp" #include "barretenberg/flavor/ultra_recursive_flavor.hpp" #include "barretenberg/flavor/ultra_rollup_recursive_flavor.hpp" @@ -54,148 +55,37 @@ void create_dummy_vkey_and_proof(typename Flavor::CircuitBuilder& builder, { using Builder = typename Flavor::CircuitBuilder; using NativeFlavor = typename Flavor::NativeFlavor; - - static constexpr size_t IPA_CLAIM_SIZE = stdlib::recursion::honk::RollupIO::IpaClaim::PUBLIC_INPUTS_SIZE; + using IO = std::conditional_t, + stdlib::recursion::honk::RollupIO, + stdlib::recursion::honk::DefaultIO>; // Set vkey->circuit_size correctly based on the proof size BB_ASSERT_EQ(proof_size, NativeFlavor::PROOF_LENGTH_WITHOUT_PUB_INPUTS()); - // a lambda that sets dummy commitments - auto set_dummy_commitment = [&builder](const std::vector& fields, size_t& offset) { - auto comm = curve::BN254::AffineElement::one() * fr::random_element(); - auto frs = field_conversion::convert_to_bn254_frs(comm); - builder.set_variable(fields[offset].witness_index, frs[0]); - builder.set_variable(fields[offset + 1].witness_index, frs[1]); - builder.set_variable(fields[offset + 2].witness_index, frs[2]); - builder.set_variable(fields[offset + 3].witness_index, frs[3]); - offset += 4; - }; - // a lambda that sets dummy evaluation in proof fields vector - auto set_dummy_evaluation_in_proof_fields = [&](size_t& offset) { - builder.set_variable(proof_fields[offset].witness_index, fr::random_element()); - offset++; - }; - // Note: this computation should always result in log_circuit_size = CONST_PROOF_SIZE_LOG_N - auto log_circuit_size = CONST_PROOF_SIZE_LOG_N; - size_t offset = 0; - // First key field is circuit size - builder.set_variable(key_fields[offset++].witness_index, 1 << log_circuit_size); - // Second key field is number of public 
inputs - builder.set_variable(key_fields[offset++].witness_index, public_inputs_size); - // Third key field is the pub inputs offset + size_t num_inner_public_inputs = public_inputs_size - IO::PUBLIC_INPUTS_SIZE; uint32_t pub_inputs_offset = NativeFlavor::has_zero_row ? 1 : 0; - builder.set_variable(key_fields[offset++].witness_index, pub_inputs_offset); - size_t num_inner_public_inputs = HasIPAAccumulator ? public_inputs_size - bb::RollupIO::PUBLIC_INPUTS_SIZE - : public_inputs_size - bb::DefaultIO::PUBLIC_INPUTS_SIZE; - - for (size_t i = 0; i < Flavor::NUM_PRECOMPUTED_ENTITIES; ++i) { - set_dummy_commitment(key_fields, offset); - } - offset = 0; // Reset offset for parsing proof fields + // Generate mock honk vk + // Note: log_circuit_size = CONST_PROOF_SIZE_LOG_N + auto honk_vk = create_mock_honk_vk( + 1 << CONST_PROOF_SIZE_LOG_N, pub_inputs_offset, num_inner_public_inputs); - // the inner public inputs - for (size_t i = 0; i < num_inner_public_inputs; i++) { - set_dummy_evaluation_in_proof_fields(offset); - } + size_t offset = 0; - // Get some values for a valid aggregation object and use them here to avoid divide by 0 or other issues. - std::array::PUBLIC_INPUTS_SIZE> dummy_pairing_points_values = - PairingPoints::construct_dummy(); - for (size_t i = 0; i < PairingPoints::PUBLIC_INPUTS_SIZE; i++) { - builder.set_variable(proof_fields[offset].witness_index, dummy_pairing_points_values[i]); + // Set honk vk in builder + for (auto& vk_element : honk_vk->to_field_elements()) { + builder.set_variable(key_fields[offset].witness_index, vk_element); offset++; } - // IPA claim - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1392): Don't use random elements here. 
- if constexpr (HasIPAAccumulator) { - for (size_t i = 0; i < IPA_CLAIM_SIZE; i++) { - set_dummy_evaluation_in_proof_fields(offset); - } - } - - // first NUM_WITNESS_ENTITIES witness commitments - for (size_t i = 0; i < Flavor::NUM_WITNESS_ENTITIES; i++) { - set_dummy_commitment(proof_fields, offset); - } - - if constexpr (Flavor::HasZK) { - // Libra concatenation commitment - set_dummy_commitment(proof_fields, offset); - // libra sum - set_dummy_evaluation_in_proof_fields(offset); - } + // Generate dummy honk proof + bb::HonkProof honk_proof = create_mock_honk_proof(num_inner_public_inputs); - // now the univariates, which can just be 0s (8*CONST_PROOF_SIZE_LOG_N Frs, where 8 is the maximum relation - // degree) - for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N * Flavor::BATCHED_RELATION_PARTIAL_LENGTH; i++) { - set_dummy_evaluation_in_proof_fields(offset); - } - - // now the sumcheck evaluations, which is just 44 0s - for (size_t i = 0; i < Flavor::NUM_ALL_ENTITIES; i++) { - set_dummy_evaluation_in_proof_fields(offset); - } - - if constexpr (Flavor::HasZK) { - // Libra claimed evaluation - set_dummy_evaluation_in_proof_fields(offset); - // Libra grand sum commitment - set_dummy_commitment(proof_fields, offset); - // Libra quotient commitment - set_dummy_commitment(proof_fields, offset); - // Gemini masking commitment - set_dummy_commitment(proof_fields, offset); - // Gemini masking evaluation - set_dummy_evaluation_in_proof_fields(offset); - } - - // now the gemini fold commitments which are CONST_PROOF_SIZE_LOG_N - 1 - for (size_t i = 1; i < CONST_PROOF_SIZE_LOG_N; i++) { - set_dummy_commitment(proof_fields, offset); - } - - // the gemini fold evaluations which are also CONST_PROOF_SIZE_LOG_N - for (size_t i = 1; i <= CONST_PROOF_SIZE_LOG_N; i++) { - set_dummy_evaluation_in_proof_fields(offset); - } - - if constexpr (Flavor::HasZK) { - // NUM_SMALL_IPA_EVALUATIONS libra evals - for (size_t i = 0; i < NUM_SMALL_IPA_EVALUATIONS; i++) { - 
set_dummy_evaluation_in_proof_fields(offset); - } - } - - // lastly the shplonk batched quotient commitment and kzg quotient commitment - for (size_t i = 0; i < 2; i++) { - set_dummy_commitment(proof_fields, offset); - } - // IPA Proof - if constexpr (HasIPAAccumulator) { - // Ls and Rs - for (size_t i = 0; i < static_cast(2) * CONST_ECCVM_LOG_N; i++) { - auto comm = curve::Grumpkin::AffineElement::one() * fq::random_element(); - auto frs = field_conversion::convert_to_bn254_frs(comm); - builder.set_variable(proof_fields[offset].witness_index, frs[0]); - builder.set_variable(proof_fields[offset + 1].witness_index, frs[1]); - offset += 2; - } - - // G_zero - auto G_zero = curve::Grumpkin::AffineElement::one() * fq::random_element(); - auto G_zero_frs = field_conversion::convert_to_bn254_frs(G_zero); - builder.set_variable(proof_fields[offset].witness_index, G_zero_frs[0]); - builder.set_variable(proof_fields[offset + 1].witness_index, G_zero_frs[1]); - offset += 2; - - // a_zero - auto a_zero = fq::random_element(); - auto a_zero_frs = field_conversion::convert_to_bn254_frs(a_zero); - builder.set_variable(proof_fields[offset].witness_index, a_zero_frs[0]); - builder.set_variable(proof_fields[offset + 1].witness_index, a_zero_frs[1]); - offset += 2; + offset = 0; + // Set honk proof in builder + for (auto& proof_element : honk_proof) { + builder.set_variable(proof_fields[offset].witness_index, proof_element); + offset++; } BB_ASSERT_EQ(offset, proof_size + public_inputs_size); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.test.cpp index 6b0477d464ff..2bc2df39fe51 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.test.cpp @@ -255,15 +255,6 @@ class IvcRecursionConstraintTest : public ::testing::Test { void SetUp() 
override { bb::srs::init_file_crs_factory(bb::srs::bb_crs_path()); } }; -/** - * @brief Check that the size of a mock merge proof matches expectation - */ -TEST_F(IvcRecursionConstraintTest, MockMergeProofSize) -{ - Goblin::MergeProof merge_proof = create_mock_merge_proof(); - EXPECT_EQ(merge_proof.size(), MERGE_PROOF_SIZE); -} - /** * @brief Test IVC accumulation of a one app and one kernel; The kernel includes a recursive oink verification for the * app, specified via an ACIR RecursionConstraint. diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/mock_verifier_inputs.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/mock_verifier_inputs.cpp index 862c4d621a40..a447c28d26c1 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/mock_verifier_inputs.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/mock_verifier_inputs.cpp @@ -27,9 +27,10 @@ using namespace bb; * @param fields field buffer to append mock commitment values to * @param num_commitments number of mock commitments to append */ +template void populate_field_elements_for_mock_commitments(std::vector& fields, const size_t& num_commitments) { - auto mock_commitment = curve::BN254::AffineElement::one(); + auto mock_commitment = Curve::AffineElement::one(); std::vector mock_commitment_frs = field_conversion::convert_to_bn254_frs(mock_commitment); for (size_t i = 0; i < num_commitments; ++i) { for (const fr& val : mock_commitment_frs) { @@ -38,18 +39,43 @@ void populate_field_elements_for_mock_commitments(std::vector& fields, const } } +/** + * @brief Helper to populate a field buffer with some number of field elements + * + * @param fields field buffer to append field elements to + * @param num_elements number of mock field elements to append + * @param value optional mock value appended + */ +template +void populate_field_elements(std::vector& fields, + const size_t& num_elements, + std::optional value = std::nullopt) +{ + for (size_t i = 0; i < num_elements; ++i) { + 
std::vector field_elements = value.has_value() + ? field_conversion::convert_to_bn254_frs(value.value()) + : field_conversion::convert_to_bn254_frs(FF::random_element()); + fields.insert(fields.end(), field_elements.begin(), field_elements.end()); + } +} + /** * @brief Create a mock oink proof that has the correct structure but is not in general valid * + * @param inner_public_inputs_size Number of public inputs coming from the ACIR constraints */ -template HonkProof create_mock_oink_proof() +template HonkProof create_mock_oink_proof(const size_t inner_public_inputs_size) { HonkProof proof; // Populate mock public inputs - typename Flavor::CircuitBuilder builder; + typename PublicInputs::Builder builder; PublicInputs::add_default(builder); + // Populate the proof with as many public inputs as required from the ACIR constraints + populate_field_elements(proof, inner_public_inputs_size); + + // Populate the proof with the public inputs added from barretenberg for (const auto& pub : builder.public_inputs()) { proof.emplace_back(builder.get_variable(pub)); } @@ -64,37 +90,69 @@ template HonkProof create_mock_oink_proof( * @brief Create a mock decider proof that has the correct structure but is not in general valid * */ -template HonkProof create_mock_decider_proof() +template HonkProof create_mock_decider_proof(const size_t const_proof_log_n) { - using FF = typename Flavor::FF; - + using FF = Flavor::FF; + using Curve = Flavor::Curve; HonkProof proof; - // Sumcheck univariates - const size_t TOTAL_SIZE_SUMCHECK_UNIVARIATES = CONST_PROOF_SIZE_LOG_N * Flavor::BATCHED_RELATION_PARTIAL_LENGTH; - for (size_t i = 0; i < TOTAL_SIZE_SUMCHECK_UNIVARIATES; ++i) { - proof.emplace_back(FF::random_element()); + if constexpr (Flavor::HasZK) { + // Libra concatenation commitment + populate_field_elements_for_mock_commitments(proof, 1); + + // Libra sum + populate_field_elements(proof, 1); } + // Sumcheck univariates + const size_t TOTAL_SIZE_SUMCHECK_UNIVARIATES = const_proof_log_n * 
Flavor::BATCHED_RELATION_PARTIAL_LENGTH; + populate_field_elements(proof, TOTAL_SIZE_SUMCHECK_UNIVARIATES); + // Sumcheck multilinear evaluations - for (size_t i = 0; i < Flavor::NUM_ALL_ENTITIES; ++i) { - proof.emplace_back(FF::random_element()); + populate_field_elements(proof, Flavor::NUM_ALL_ENTITIES); + + if constexpr (Flavor::HasZK) { + // Libra claimed evaluation + populate_field_elements(proof, 1); + + // Libra grand sum commitment + populate_field_elements_for_mock_commitments(proof, 1); + + // Libra quotient commitment + populate_field_elements_for_mock_commitments(proof, 1); + + // Gemini masking commitment + populate_field_elements_for_mock_commitments(proof, 1); + + // Gemini masking evaluation + populate_field_elements(proof, 1); } // Gemini fold commitments - const size_t NUM_GEMINI_FOLD_COMMITMENTS = CONST_PROOF_SIZE_LOG_N - 1; - populate_field_elements_for_mock_commitments(proof, NUM_GEMINI_FOLD_COMMITMENTS); + const size_t NUM_GEMINI_FOLD_COMMITMENTS = const_proof_log_n - 1; + populate_field_elements_for_mock_commitments(proof, NUM_GEMINI_FOLD_COMMITMENTS); // Gemini fold evaluations - const size_t NUM_GEMINI_FOLD_EVALUATIONS = CONST_PROOF_SIZE_LOG_N; - for (size_t i = 0; i < NUM_GEMINI_FOLD_EVALUATIONS; ++i) { - proof.emplace_back(FF::random_element()); + const size_t NUM_GEMINI_FOLD_EVALUATIONS = const_proof_log_n; + populate_field_elements(proof, NUM_GEMINI_FOLD_EVALUATIONS); + + if constexpr (std::is_same_v) { + // Gemini P pos evaluation + populate_field_elements(proof, 1); + + // Gemini P neg evaluation + populate_field_elements(proof, 1); + } + + if constexpr (Flavor::HasZK) { + // NUM_SMALL_IPA_EVALUATIONS libra evals + populate_field_elements(proof, NUM_SMALL_IPA_EVALUATIONS); } // Shplonk batched quotient commitment - populate_field_elements_for_mock_commitments(proof, /*num_commitments=*/1); + populate_field_elements_for_mock_commitments(proof, /*num_commitments=*/1); // KZG quotient commitment - 
populate_field_elements_for_mock_commitments(proof, /*num_commitments=*/1); + populate_field_elements_for_mock_commitments(proof, /*num_commitments=*/1); return proof; } @@ -102,16 +160,22 @@ template HonkProof create_mock_decider_proof() /** * @brief Create a mock honk proof that has the correct structure but is not in general valid * + * @param inner_public_inputs_size Number of public inputs coming from the ACIR constraints */ -template HonkProof create_mock_honk_proof() +template HonkProof create_mock_honk_proof(const size_t inner_public_inputs_size) { // Construct a Honk proof as the concatenation of an Oink proof and a Decider proof - HonkProof oink_proof = create_mock_oink_proof(); + HonkProof oink_proof = create_mock_oink_proof(inner_public_inputs_size); HonkProof decider_proof = create_mock_decider_proof(); HonkProof proof; proof.reserve(oink_proof.size() + decider_proof.size()); proof.insert(proof.end(), oink_proof.begin(), oink_proof.end()); proof.insert(proof.end(), decider_proof.begin(), decider_proof.end()); + + if constexpr (HasIPAAccumulator) { + HonkProof ipa_proof = create_mock_ipa_proof(); + proof.insert(proof.end(), ipa_proof.begin(), ipa_proof.end()); + } return proof; } @@ -125,15 +189,12 @@ template HonkProof create_mock_pg_proof() HonkProof proof = create_mock_oink_proof(); // Populate mock perturbator coefficients - for (size_t idx = 1; idx <= CONST_PG_LOG_N; idx++) { - proof.emplace_back(0); - } + populate_field_elements(proof, CONST_PG_LOG_N, /*value=*/fr::zero()); // Populate mock combiner quotient coefficients - for (size_t idx = DeciderProvingKeys_::NUM; idx < DeciderProvingKeys_::BATCHED_EXTENDED_LENGTH; - idx++) { - proof.emplace_back(0); - } + size_t NUM_COEFF_COMBINER_QUOTIENT = + DeciderProvingKeys_::BATCHED_EXTENDED_LENGTH - DeciderProvingKeys_::NUM; + populate_field_elements(proof, NUM_COEFF_COMBINER_QUOTIENT, /*value=*/fr::zero()); return proof; } @@ -145,18 +206,13 @@ template HonkProof create_mock_pg_proof() */ 
Goblin::MergeProof create_mock_merge_proof() { - using Flavor = MegaFlavor; - using FF = Flavor::FF; - - std::vector proof; + Goblin::MergeProof proof; proof.reserve(MERGE_PROOF_SIZE); - FF mock_val(5); - auto mock_commitment = curve::BN254::AffineElement::one(); - std::vector mock_commitment_frs = field_conversion::convert_to_bn254_frs(mock_commitment); + uint32_t mock_shift_size = 5; // Must be smaller than 32, otherwise pow raises an error - // Populate mock subtable size - proof.emplace_back(mock_val); + // Populate mock shift size + populate_field_elements(proof, 1, /*value=*/fr{ mock_shift_size }); // There are 8 entities in the merge protocol (4 columns x 2 components: T_j, g_j(X) = X^{l-1} t_j(X)) // and 8 evaluations (4 columns x 2 components: g_j(kappa), t_j(1/kappa)) @@ -164,43 +220,197 @@ Goblin::MergeProof create_mock_merge_proof() const size_t NUM_TRANSCRIPT_EVALUATIONS = 8; // Transcript poly commitments - for (size_t i = 0; i < NUM_TRANSCRIPT_ENTITIES; ++i) { - for (const FF& val : mock_commitment_frs) { - proof.emplace_back(val); - } - } + populate_field_elements_for_mock_commitments(proof, NUM_TRANSCRIPT_ENTITIES); + // Transcript poly evaluations - for (size_t i = 0; i < NUM_TRANSCRIPT_EVALUATIONS; ++i) { - proof.emplace_back(mock_val); - } + populate_field_elements(proof, NUM_TRANSCRIPT_EVALUATIONS); // Shplonk proof: commitment to the quotient - for (const FF& val : mock_commitment_frs) { - proof.emplace_back(val); - } + populate_field_elements_for_mock_commitments(proof, 1); // KZG proof: commitment to W - for (const FF& val : mock_commitment_frs) { - proof.emplace_back(val); - } + populate_field_elements_for_mock_commitments(proof, 1); BB_ASSERT_EQ(proof.size(), MERGE_PROOF_SIZE); return proof; } +/** + * @brief Create a mock pre-ipa proof which has the correct structure but is not necessarily valid + * + * @details An ECCVM proof is made of a pre-ipa proof and an ipa-proof. Here we mock the pre-ipa part. 
+ * + * @return HonkProof + */ +HonkProof create_mock_pre_ipa_proof() +{ + using FF = ECCVMFlavor::FF; + HonkProof proof; + + // 1. NUM_WITNESS_ENTITIES commitments + populate_field_elements_for_mock_commitments(proof, ECCVMFlavor::NUM_WITNESS_ENTITIES); + + // 2. Libra concatenation commitment + populate_field_elements_for_mock_commitments(proof, /*num_commitments*/ 1); + + // 3. Libra sum + populate_field_elements(proof, 1); + + // 4. Sumcheck univariates commitments + 5. Sumcheck univariate evaluations + for (size_t idx = 0; idx < CONST_ECCVM_LOG_N; idx++) { + populate_field_elements_for_mock_commitments(proof, /*num_commitments=*/1); + populate_field_elements(proof, /*num_elements=*/2); + } + + // 6. ALL_ENTITIES sumcheck evaluations + populate_field_elements(proof, ECCVMFlavor::NUM_ALL_ENTITIES); + + // 7. Libra evaluation + populate_field_elements(proof, 1); + + // 8. Libra grand sum commitment + populate_field_elements_for_mock_commitments(proof, /*num_commitments=*/1); + + // 9. Libra quotient commitment + populate_field_elements_for_mock_commitments(proof, /*num_commitments=*/1); + + // 10. Gemini masking commitment + populate_field_elements_for_mock_commitments(proof, /*num_commitments=*/1); + + // 11. Gemini masking evaluations + populate_field_elements(proof, 1); + + // 12. Gemini fold commitments + populate_field_elements_for_mock_commitments(proof, + /*num_commitments=*/CONST_ECCVM_LOG_N - 1); + + // 13. Gemini evaluations + populate_field_elements(proof, CONST_ECCVM_LOG_N); + + // 14. NUM_SMALL_IPA_EVALUATIONS libra evals + populate_field_elements(proof, NUM_SMALL_IPA_EVALUATIONS); + + // 15. Shplonk + populate_field_elements_for_mock_commitments(proof, /*num_commitments=*/1); + + // 16. Translator concatenated masking term commitment + populate_field_elements_for_mock_commitments(proof, /*num_commitments=*/1); + + // 17. Translator op evaluation + populate_field_elements(proof, 1); + + // 18. 
Translator Px evaluation + populate_field_elements(proof, 1); + + // 19. Translator Py evaluation + populate_field_elements(proof, 1); + + // 20. Translator z1 evaluation + populate_field_elements(proof, 1); + + // 21. Translator z2 evaluation + populate_field_elements(proof, 1); + + // 22. Translator concatenated masking term evaluation + populate_field_elements(proof, 1); + + // 23. Translator grand sum commitment + populate_field_elements_for_mock_commitments(proof, /*num_commitments=*/1); + + // 24. Translator quotient commitment + populate_field_elements_for_mock_commitments(proof, /*num_commitments=*/1); + + // 25. Translator concatenation evaluation + populate_field_elements(proof, 1); + + // 26. Translator grand sum shift evaluation + populate_field_elements(proof, 1); + + // 27. Translator grand sum evaluation + populate_field_elements(proof, 1); + + // 28. Translator quotient evaluation + populate_field_elements(proof, 1); + + // 29. Shplonk + populate_field_elements_for_mock_commitments(proof, /*num_commitments=*/1); + + BB_ASSERT_EQ(proof.size(), ECCVMFlavor::PROOF_LENGTH_WITHOUT_PUB_INPUTS - IPA_PROOF_LENGTH); + + return proof; +} + +/** + * @brief Create a mock ipa proof which has the correct structure but is not necessarily valid + * + * @details An ECCVM proof is made of a pre-ipa proof and an ipa-proof. Here we mock the ipa part. 
+ * + * @return HonkProof + */ +HonkProof create_mock_ipa_proof() +{ + HonkProof proof; + + // Commitments to L and R for CONST_ECCVM_LOG_N round + populate_field_elements_for_mock_commitments( + proof, /*num_commitments=*/CONST_ECCVM_LOG_N + CONST_ECCVM_LOG_N); + + // Commitment to G_0 + populate_field_elements_for_mock_commitments(proof, /*num_commitments=*/1); + + // a_0 evaluation (a_0 is in the base field of BN254) + populate_field_elements(proof, 1); + + BB_ASSERT_EQ(proof.size(), IPA_PROOF_LENGTH); + + return proof; +} + +/** + * @brief Create a mock translator proof which has the correct structure but is not necessarily valid + * + * @return HonkProof + */ +HonkProof create_mock_translator_proof() +{ + using BF = TranslatorFlavor::BF; + using Curve = TranslatorFlavor::Curve; + + HonkProof proof; + HonkProof decider_proof = create_mock_decider_proof(TranslatorFlavor::CONST_TRANSLATOR_LOG_N); + + // 1. Accumulated result + populate_field_elements(proof, 1); + + // 2. NUM_WITNESS_ENTITIES commitments + populate_field_elements_for_mock_commitments(proof, + /*num_commitments=*/TranslatorFlavor::NUM_WITNESS_ENTITIES - 4); + + // Insert decider proof + proof.insert(proof.end(), decider_proof.begin(), decider_proof.end()); + + BB_ASSERT_EQ(proof.size(), TranslatorFlavor::PROOF_LENGTH_WITHOUT_PUB_INPUTS); + + return proof; +} + /** * @brief Create a mock MegaHonk VK that has the correct structure * + * @param dyadic_size Dyadic size of the circuit for which we generate a vk + * @param pub_inputs_offest Indicating whether the circuit has a first zero row + * @param inner_public_inputs_size Number of public inputs coming from the ACIR constraints */ template std::shared_ptr create_mock_honk_vk(const size_t dyadic_size, - const size_t pub_inputs_offset) + const size_t pub_inputs_offset, + const size_t inner_public_inputs_size) { // Set relevant VK metadata and commitments auto honk_verification_key = std::make_shared(); honk_verification_key->log_circuit_size = 
bb::numeric::get_msb(dyadic_size); - honk_verification_key->num_public_inputs = PublicInputs::PUBLIC_INPUTS_SIZE; + honk_verification_key->num_public_inputs = inner_public_inputs_size + PublicInputs::PUBLIC_INPUTS_SIZE; honk_verification_key->pub_inputs_offset = pub_inputs_offset; // must be set correctly for (auto& commitment : honk_verification_key->get_all()) { @@ -235,33 +445,71 @@ template std::shared_ptr> crea } // Explicitly instantiate template functions -template HonkProof create_mock_oink_proof(); -template HonkProof create_mock_oink_proof(); -template HonkProof create_mock_oink_proof>(); - -template HonkProof create_mock_oink_proof>(); - -template HonkProof create_mock_decider_proof(); -template HonkProof create_mock_decider_proof(); - -template HonkProof create_mock_honk_proof(); -template HonkProof create_mock_honk_proof(); -template HonkProof create_mock_honk_proof>(); - -template HonkProof create_mock_honk_proof>(); +template HonkProof create_mock_oink_proof(const size_t); +template HonkProof create_mock_oink_proof(const size_t); +template HonkProof create_mock_oink_proof>( + const size_t); + +template HonkProof create_mock_oink_proof>( + const size_t); +template HonkProof create_mock_oink_proof>( + const size_t); +template HonkProof create_mock_oink_proof>( + const size_t); +template HonkProof create_mock_oink_proof>( + const size_t); +template HonkProof create_mock_oink_proof(const size_t); + +template HonkProof create_mock_decider_proof(const size_t); +template HonkProof create_mock_decider_proof(const size_t); +template HonkProof create_mock_decider_proof(const size_t); +template HonkProof create_mock_decider_proof(const size_t); +template HonkProof create_mock_decider_proof(const size_t); + +template HonkProof create_mock_honk_proof(const size_t); +template HonkProof create_mock_honk_proof(const size_t); +template HonkProof create_mock_honk_proof>( + const size_t); + +template HonkProof create_mock_honk_proof>( + const size_t); +template 
HonkProof create_mock_honk_proof>( + const size_t); +template HonkProof create_mock_honk_proof>( + const size_t); +template HonkProof create_mock_honk_proof>( + const size_t); +template HonkProof create_mock_honk_proof(const size_t); template HonkProof create_mock_pg_proof(); template HonkProof create_mock_pg_proof(); template HonkProof create_mock_pg_proof>(); template std::shared_ptr create_mock_honk_vk( - const size_t, const size_t); + const size_t, const size_t, const size_t); template std::shared_ptr create_mock_honk_vk( - const size_t, const size_t); + const size_t, const size_t, const size_t); template std::shared_ptr create_mock_honk_vk< MegaFlavor, - stdlib::recursion::honk::HidingKernelIO>(const size_t, const size_t); + stdlib::recursion::honk::HidingKernelIO>(const size_t, const size_t, const size_t); + +template std::shared_ptr create_mock_honk_vk< + UltraFlavor, + stdlib::recursion::honk::DefaultIO>(const size_t, const size_t, const size_t); +template std::shared_ptr create_mock_honk_vk< + UltraZKFlavor, + stdlib::recursion::honk::DefaultIO>(const size_t, const size_t, const size_t); +template std::shared_ptr create_mock_honk_vk< + UltraFlavor, + stdlib::recursion::honk::DefaultIO>(const size_t, const size_t, const size_t); +template std::shared_ptr create_mock_honk_vk< + UltraZKFlavor, + stdlib::recursion::honk::DefaultIO>(const size_t, const size_t, const size_t); +template std::shared_ptr create_mock_honk_vk( + const size_t, const size_t, const size_t); + template std::shared_ptr> create_mock_decider_vk(); } // namespace acir_format diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/mock_verifier_inputs.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/mock_verifier_inputs.hpp index eef92550e588..61f9c77df380 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/mock_verifier_inputs.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/mock_verifier_inputs.hpp @@ -14,15 +14,22 @@ namespace acir_format { -template 
bb::HonkProof create_mock_oink_proof(); -template bb::HonkProof create_mock_decider_proof(); -template bb::HonkProof create_mock_honk_proof(); +template +bb::HonkProof create_mock_oink_proof(const size_t inner_public_inputs_size = 0); +template +bb::HonkProof create_mock_decider_proof(const size_t const_proof_log_n = bb::CONST_PROOF_SIZE_LOG_N); +template +bb::HonkProof create_mock_honk_proof(const size_t inner_public_inputs_size = 0); template bb::HonkProof create_mock_pg_proof(); bb::Goblin::MergeProof create_mock_merge_proof(); +bb::HonkProof create_mock_pre_ipa_proof(); +bb::HonkProof create_mock_ipa_proof(); +bb::HonkProof create_mock_translator_proof(); template std::shared_ptr create_mock_honk_vk(const size_t dyadic_size, - const size_t pub_inputs_offset); + const size_t pub_inputs_offset, + const size_t inner_public_inputs_size = 0); template std::shared_ptr> create_mock_decider_vk(); } // namespace acir_format diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/mock_verifier_inputs.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/mock_verifier_inputs.test.cpp index 4429064470f3..6b5e4fc055ae 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/mock_verifier_inputs.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/mock_verifier_inputs.test.cpp @@ -16,9 +16,12 @@ using namespace acir_format; using namespace bb; -template class MockVerifierInputsTest : public ::testing::Test {}; +template class MockVerifierInputsTest : public ::testing::Test { + public: + static void SetUpTestSuite() { bb::srs::init_file_crs_factory(bb::srs::bb_crs_path()); } +}; -using FlavorTypes = testing::Types; +using FlavorTypes = testing::Types; TYPED_TEST_SUITE(MockVerifierInputsTest, FlavorTypes); @@ -31,6 +34,33 @@ TEST(MockVerifierInputsTest, MockMergeProofSize) EXPECT_EQ(merge_proof.size(), MERGE_PROOF_SIZE); } +/** + * @brief Check that the size of a mock pre-ipa proof matches expectation + */ +TEST(MockVerifierInputsTest, 
MockPreIpaProofSize) +{ + HonkProof pre_ipa_proof = create_mock_pre_ipa_proof(); + EXPECT_EQ(pre_ipa_proof.size(), ECCVMFlavor::PROOF_LENGTH_WITHOUT_PUB_INPUTS - IPA_PROOF_LENGTH); +} + +/** + * @brief Check that the size of a mock ipa proof matches expectation + */ +TEST(MockVerifierInputsTest, MockIPAProofSize) +{ + HonkProof ipa_proof = create_mock_ipa_proof(); + EXPECT_EQ(ipa_proof.size(), IPA_PROOF_LENGTH); +} + +/** + * @brief Check that the size of a mock translator proof matches expectation + */ +TEST(MockVerifierInputsTest, MockTranslatorProofSize) +{ + HonkProof translator_proof = create_mock_translator_proof(); + EXPECT_EQ(translator_proof.size(), TranslatorFlavor::PROOF_LENGTH_WITHOUT_PUB_INPUTS); +} + /** * @brief Check that the size of a mock Oink proof matches expectation for MegaFlavor * @@ -63,18 +93,24 @@ TEST(MockVerifierInputsTest, MockMegaOinkProofSize) } /** - * @brief Check that the size of a mock Oink proof matches expectation for UltraFlavor + * @brief Check that the size of a mock Oink proof matches expectation for Ultra flavors * */ -TEST(MockVerifierInputsTest, MockUltraOinkProofSize) +TYPED_TEST(MockVerifierInputsTest, MockUltraOinkProofSize) { - using Flavor = UltraFlavor; - using Builder = UltraCircuitBuilder; - - // DefaultIO - const size_t NUM_PUBLIC_INPUTS = stdlib::recursion::honk::DefaultIO::PUBLIC_INPUTS_SIZE; - HonkProof honk_proof = create_mock_oink_proof>(); - EXPECT_EQ(honk_proof.size(), Flavor::OINK_PROOF_LENGTH_WITHOUT_PUB_INPUTS + NUM_PUBLIC_INPUTS); + using Flavor = TypeParam; + using Builder = Flavor::CircuitBuilder; + using IO = std::conditional_t, + stdlib::recursion::honk::RollupIO, + stdlib::recursion::honk::DefaultIO>; + + if (!std::is_same_v) { + const size_t NUM_PUBLIC_INPUTS = IO::PUBLIC_INPUTS_SIZE; + HonkProof honk_proof = create_mock_oink_proof(); + EXPECT_EQ(honk_proof.size(), Flavor::OINK_PROOF_LENGTH_WITHOUT_PUB_INPUTS + NUM_PUBLIC_INPUTS); + } else { + GTEST_SKIP(); + } } /** @@ -85,8 +121,12 @@ 
TYPED_TEST(MockVerifierInputsTest, MockDeciderProofSize) { using Flavor = TypeParam; - HonkProof honk_proof = create_mock_decider_proof(); - EXPECT_EQ(honk_proof.size(), Flavor::DECIDER_PROOF_LENGTH()); + if (!std::is_same_v) { + HonkProof honk_proof = create_mock_decider_proof(); + EXPECT_EQ(honk_proof.size(), Flavor::DECIDER_PROOF_LENGTH()); + } else { + GTEST_SKIP(); + } } /** @@ -121,16 +161,22 @@ TEST(MockVerifierInputsTest, MockMegaHonkProofSize) } /** - * @brief Check that the size of a mock Honk proof matches expectation for UltraFlavor + * @brief Check that the size of a mock Honk proof matches expectation for Ultra flavors * */ -TEST(MockVerifierInputsTest, MockHonkProofSize) +TYPED_TEST(MockVerifierInputsTest, MockHonkProofSize) { - using Flavor = UltraFlavor; - using Builder = UltraCircuitBuilder; - - // DefaultIO - const size_t NUM_PUBLIC_INPUTS = stdlib::recursion::honk::DefaultIO::PUBLIC_INPUTS_SIZE; - HonkProof honk_proof = create_mock_honk_proof>(); - EXPECT_EQ(honk_proof.size(), Flavor::PROOF_LENGTH_WITHOUT_PUB_INPUTS() + NUM_PUBLIC_INPUTS); + using Flavor = TypeParam; + using Builder = Flavor::CircuitBuilder; + using IO = std::conditional_t, + stdlib::recursion::honk::RollupIO, + stdlib::recursion::honk::DefaultIO>; + + if (!std::is_same_v) { + const size_t NUM_PUBLIC_INPUTS = IO::PUBLIC_INPUTS_SIZE; + HonkProof honk_proof = create_mock_honk_proof(); + EXPECT_EQ(honk_proof.size(), Flavor::PROOF_LENGTH_WITHOUT_PUB_INPUTS() + NUM_PUBLIC_INPUTS); + } else { + GTEST_SKIP(); + } } diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/proof_surgeon.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/proof_surgeon.hpp index 2d9999b9c094..9aeaeb491a52 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/proof_surgeon.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/proof_surgeon.hpp @@ -20,44 +20,6 @@ namespace acir_format { template class ProofSurgeon { public: - /** - * @brief Construct a string containing the inputs 
to a noir verify_proof call (to be written to a .toml) - * - * @param proof A complete bberg style proof (i.e. contains the public inputs) - * @param verification_key - * @param toml_path - */ - template - static std::string construct_recursion_inputs_toml_data(std::vector& proof, - const std::shared_ptr& verification_key, - bool ipa_accumulation) - { - // Convert verification key to fields - auto vk_fields = verification_key->to_field_elements(); - - // Get public inputs by cutting them out of the proof - size_t num_public_inputs_to_extract = - ipa_accumulation - ? static_cast(verification_key->num_public_inputs) - bb::RollupIO::PUBLIC_INPUTS_SIZE - : static_cast(verification_key->num_public_inputs) - bb::DefaultIO::PUBLIC_INPUTS_SIZE; - debug("proof size: ", proof.size()); - debug("number of public inputs to extract: ", num_public_inputs_to_extract); - std::vector public_inputs = cut_public_inputs_from_proof(proof, num_public_inputs_to_extract); - - // Construct json-style output for each component - std::string proof_json = field_elements_to_json(proof); - std::string pub_inputs_json = field_elements_to_json(public_inputs); - std::string vk_json = field_elements_to_json(vk_fields); - - // Format with labels for noir recursion input - std::string toml_content = "key_hash = " + format("\"", FF(0), "\"") + "\n"; // not used by honk - toml_content += "proof = " + proof_json + "\n"; - toml_content += "public_inputs = " + pub_inputs_json + "\n"; - toml_content += "verification_key = " + vk_json + "\n"; - - return toml_content; - } - /** * @brief Reconstruct a bberg style proof from a acir style proof + public inputs * @details Insert the public inputs in the middle the proof fields after 'inner_public_input_offset' because this diff --git a/barretenberg/cpp/src/barretenberg/stdlib/special_public_inputs/special_public_inputs.hpp b/barretenberg/cpp/src/barretenberg/stdlib/special_public_inputs/special_public_inputs.hpp index 5616092d7055..781ed73631d3 100644 --- 
a/barretenberg/cpp/src/barretenberg/stdlib/special_public_inputs/special_public_inputs.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/special_public_inputs/special_public_inputs.hpp @@ -135,8 +135,9 @@ class KernelIO { * @brief Manages the data that is propagated on the public inputs of an application/function circuit * */ -template class DefaultIO { +template class DefaultIO { public: + using Builder = Builder_; using Curve = stdlib::bn254; // curve is always bn254 using FF = Curve::ScalarField; using PairingInputs = stdlib::recursion::PairingPoints; diff --git a/ci3/docker_isolate b/ci3/docker_isolate index c021ce5f192a..711f6cbd0f6e 100755 --- a/ci3/docker_isolate +++ b/ci3/docker_isolate @@ -43,6 +43,7 @@ network_arg="--net=none" # Launch the container in the background. # Don't launch in the foreground or you can't process SIGINT/SIGTERM. # Don't use & as we want to block, and be sure it starts before processing any signals. +# We use --pid=host so that we can rely on $$ in bash scripts being (temporally) unique. 
set -x cid=$(docker run -d \ ${name_arg:-} \ @@ -50,6 +51,7 @@ cid=$(docker run -d \ ${cpuset_arg:-} \ --cpus=$CPUS \ --memory=$MEM \ + --pid=host \ --user $(id -u):$(id -g) \ -v$HOME:$HOME \ --mount type=tmpfs,target=/tmp,tmpfs-size=1g \ diff --git a/noir-projects/gates_report.sh b/noir-projects/gates_report.sh index 855472a01f32..3b59845efd31 100755 --- a/noir-projects/gates_report.sh +++ b/noir-projects/gates_report.sh @@ -44,9 +44,9 @@ for pathname in "$PROTOCOL_CIRCUITS_DIR/target"/*.json; do if [ "$IS_MEGA_HONK_CIRCUIT" = "true" ]; then GATES_INFO=$($BB_BIN gates --scheme client_ivc -b "$pathname") elif [ "$IS_ROLLUP_HONK_CIRCUIT" = "true" ]; then - GATES_INFO=$($BB_BIN gates --scheme ultra_honk -b "$pathname" --honk_recursion 2) + GATES_INFO=$($BB_BIN gates --scheme ultra_honk -b "$pathname" --ipa_accumulation) else - GATES_INFO=$($BB_BIN gates --scheme ultra_honk -b "$pathname" --honk_recursion 1) + GATES_INFO=$($BB_BIN gates --scheme ultra_honk -b "$pathname") fi MAIN_FUNCTION_INFO=$(echo $GATES_INFO | jq -r ".functions[0] | {package_name: "\"$ARTIFACT_NAME\"", functions: [{name: \"main\", opcodes: .acir_opcodes, circuit_size}]}") diff --git a/noir-projects/noir-protocol-circuits/bootstrap.sh b/noir-projects/noir-protocol-circuits/bootstrap.sh index 3e8b967c64dd..a3141f20672f 100755 --- a/noir-projects/noir-protocol-circuits/bootstrap.sh +++ b/noir-projects/noir-protocol-circuits/bootstrap.sh @@ -83,8 +83,7 @@ function compile { local write_vk_cmd="write_vk --scheme client_ivc --verifier_type standalone" elif echo "$name" | grep -qE "${rollup_honk_regex}"; then local proto="ultra_rollup_honk" - # --honk_recursion 2 injects a fake ipa claim - local write_vk_cmd="write_vk --scheme ultra_honk --ipa_accumulation --honk_recursion 2" + local write_vk_cmd="write_vk --scheme ultra_honk --ipa_accumulation" elif echo "$name" | grep -qE "rollup_root"; then local proto="ultra_keccak_honk" # the root rollup does not need to inject a fake ipa claim @@ -92,7 +91,7 @@ 
function compile { local write_vk_cmd="write_vk --scheme ultra_honk --oracle_hash keccak" else local proto="ultra_honk" - local write_vk_cmd="write_vk --scheme ultra_honk --init_kzg_accumulator --honk_recursion 1" + local write_vk_cmd="write_vk --scheme ultra_honk" fi # No vks needed for simulated circuits. [[ "$name" == *"simulated"* ]] && return @@ -210,11 +209,11 @@ function bench_cmds { for artifact in ./target/*.json; do [[ "$artifact" =~ _simulated ]] && continue if echo "$artifact" | grep -qEf <(printf '%s\n' "${ivc_patterns[@]}"); then - echo "$prefix $artifact client_ivc" + echo "$prefix $artifact --scheme client_ivc" elif echo "$artifact" | grep -qEf <(printf '%s\n' "${rollup_honk_patterns[@]}"); then - echo "$prefix $artifact ultra_honk 2" + echo "$prefix $artifact --scheme ultra_honk --ipa_accumulation" else - echo "$prefix $artifact ultra_honk 1" + echo "$prefix $artifact --scheme ultra_honk" fi done } diff --git a/noir-projects/noir-protocol-circuits/scripts/flamegraph.sh b/noir-projects/noir-protocol-circuits/scripts/flamegraph.sh index f4cb9b858518..5008b3cb1d37 100755 --- a/noir-projects/noir-protocol-circuits/scripts/flamegraph.sh +++ b/noir-projects/noir-protocol-circuits/scripts/flamegraph.sh @@ -161,9 +161,9 @@ for CIRCUIT_NAME in "${CIRCUIT_NAMES[@]}"; do if [ "$IS_MEGA_HONK_CIRCUIT" = "true" ]; then $PROFILER gates --artifact-path "${ARTIFACT}" --backend-path "$SCRIPT_DIR/../../../barretenberg/cpp/build/bin/bb" --output "$DEST" --output-filename "$CIRCUIT_NAME" --backend-gates-command "gates" --scheme client_ivc --include_gates_per_opcode elif [ "$IS_ROLLUP_HONK_CIRCUIT" = "true" ]; then - $PROFILER gates --artifact-path "${ARTIFACT}" --backend-path "$SCRIPT_DIR/../../../barretenberg/cpp/build/bin/bb" --output "$DEST" --output-filename "$CIRCUIT_NAME" --backend-gates-command "gates" --scheme ultra_honk --honk_recursion 2 --include_gates_per_opcode + $PROFILER gates --artifact-path "${ARTIFACT}" --backend-path 
"$SCRIPT_DIR/../../../barretenberg/cpp/build/bin/bb" --output "$DEST" --output-filename "$CIRCUIT_NAME" --backend-gates-command "gates" --scheme ultra_honk --ipa_accumulation --include_gates_per_opcode else - $PROFILER gates --artifact-path "${ARTIFACT}" --backend-path "$SCRIPT_DIR/../../../barretenberg/cpp/build/bin/bb" --output "$DEST" --output-filename "$CIRCUIT_NAME" --backend-gates-command "gates" --scheme ultra_honk --honk_recursion 1 --include_gates_per_opcode + $PROFILER gates --artifact-path "${ARTIFACT}" --backend-path "$SCRIPT_DIR/../../../barretenberg/cpp/build/bin/bb" --output "$DEST" --output-filename "$CIRCUIT_NAME" --backend-gates-command "gates" --scheme ultra_honk --include_gates_per_opcode fi echo "Flamegraph generated for circuit: $CIRCUIT_NAME" diff --git a/noir-projects/noir-protocol-circuits/scripts/run_bench.sh b/noir-projects/noir-protocol-circuits/scripts/run_bench.sh index 78a6521274c0..2c106fb4eef5 100755 --- a/noir-projects/noir-protocol-circuits/scripts/run_bench.sh +++ b/noir-projects/noir-protocol-circuits/scripts/run_bench.sh @@ -3,13 +3,13 @@ cd $(dirname $0)/.. 
artifact=$1 -scheme=$2 -rec=${3:-} +shift +# rest of $@ args are flags circuit_name=$(basename $artifact .json) mkdir -p ./bench-out -../../barretenberg/cpp/build/bin/bb gates -b $artifact --scheme $scheme ${rec:+--honk_recursion $rec} | +../../barretenberg/cpp/build/bin/bb gates -b $artifact "$@" | jq --arg name $circuit_name '[ { name: ($name + "_opcodes"), unit: "opcodes", value: .functions[0].acir_opcodes }, { name: ($name + "_gates"), unit: "gates", value: .functions[0].circuit_size } diff --git a/yarn-project/bb-prover/src/bb/execute.ts b/yarn-project/bb-prover/src/bb/execute.ts index cac4e96c1d27..e41a43db4c22 100644 --- a/yarn-project/bb-prover/src/bb/execute.ts +++ b/yarn-project/bb-prover/src/bb/execute.ts @@ -217,7 +217,6 @@ export async function generateProof( workingDirectory: string, circuitName: string, bytecode: Buffer, - recursive: boolean, inputWitnessFile: string, flavor: UltraHonkFlavor, log: Logger, @@ -260,9 +259,6 @@ export async function generateProof( inputWitnessFile, '-v', ]); - if (recursive) { - args.push('--init_kzg_accumulator'); - } const loggingArg = log.level === 'debug' || log.level === 'trace' ? '-d' : log.level === 'verbose' ? '-v' : ''; if (loggingArg !== '') { args.push(loggingArg); diff --git a/yarn-project/bb-prover/src/prover/server/bb_prover.ts b/yarn-project/bb-prover/src/prover/server/bb_prover.ts index 124248d71a83..cb1d045476f3 100644 --- a/yarn-project/bb-prover/src/prover/server/bb_prover.ts +++ b/yarn-project/bb-prover/src/prover/server/bb_prover.ts @@ -98,9 +98,6 @@ import { readProofAsFields, writeClientIVCProofToOutputDirectory } from '../proo const logger = createLogger('bb-prover'); -// All `ServerCircuitArtifact` are recursive. -const SERVER_CIRCUIT_RECURSIVE = true; - export interface BBProverConfig extends BBConfig, ACVMConfig { // list of circuits supported by this prover. 
defaults to all circuits if empty circuitFilter?: ServerProtocolArtifact[]; @@ -470,7 +467,6 @@ export class BBNativeRollupProver implements ServerCircuitProver { workingDirectory, circuitType, Buffer.from(artifact.bytecode, 'base64'), - SERVER_CIRCUIT_RECURSIVE, outputWitnessFile, getUltraHonkFlavorForCircuit(circuitType), logger, diff --git a/yarn-project/ivc-integration/src/prove_native.ts b/yarn-project/ivc-integration/src/prove_native.ts index efd59e317dbd..c61bb510095c 100644 --- a/yarn-project/ivc-integration/src/prove_native.ts +++ b/yarn-project/ivc-integration/src/prove_native.ts @@ -125,7 +125,6 @@ async function proveRollupCircuit