diff --git a/.github/workflows/ci3-external.yml b/.github/workflows/ci3-external.yml index 0440104c09d7..6cd68b0a0934 100644 --- a/.github/workflows/ci3-external.yml +++ b/.github/workflows/ci3-external.yml @@ -16,7 +16,7 @@ jobs: ci-external: runs-on: ubuntu-latest # exclusive with ci3.yml, only run on forks. - if: github.event.pull_request.head.repo.full_name != github.repository + if: github.event.pull_request.head.repo.fork steps: ############# # Prepare Env @@ -28,14 +28,11 @@ jobs: ref: ${{ github.event.pull_request.head.sha || github.sha }} - name: Fail If Draft - if: github.event.pull_request.draft && (github.event.action != 'labeled' || github.event.label.name != 'trigger-workflow') + if: github.event.pull_request.draft run: echo "CI is not run on drafts." && exit 1 - name: External Contributor Checks # Run only if a pull request event type and we have a forked repository origin. - if: | - (github.event_name == 'pull_request' || github.event_name == 'pull_request_target') && - github.event.pull_request.head.repo.full_name != github.repository run: | set -o pipefail git fetch origin ${{ github.event.pull_request.base.ref }} --depth=1 &>/dev/null @@ -48,7 +45,7 @@ jobs: echo "Error: External PRs can only target master, targeted: ${{ github.event.pull_request.base.ref }}." exit 1 fi - labeled="${{contains(github.event.pull_request.labels.*.name, 'ci-external') || contains(github.event.pull_request.labels.*.name, 'ci-external-once')}}" + labeled="${{contains(github.event.pull_request.labels.*.name, 'ci-external') || github.event.label.name == 'ci-external-once'}}" if [ "$labeled" = false ]; then echo "External PRs need the 'ci-external' or 'ci-external-once' labels to run." 
exit 1 diff --git a/.github/workflows/ci3.yml b/.github/workflows/ci3.yml old mode 100644 new mode 100755 index fe1f13b68ca5..fb589685cdc3 --- a/.github/workflows/ci3.yml +++ b/.github/workflows/ci3.yml @@ -25,8 +25,9 @@ concurrency: jobs: ci: runs-on: ubuntu-latest - # exclusive with ci3-external.yml: if it is a pull request target only run if it is NOT a fork. - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository + # exclusive with ci3-external.yml: never run on forks + # (github.event.pull_request.head.repo.fork resolves to nil if not a pull request) + if: github.event.pull_request.head.repo.fork != true environment: ${{ startsWith(github.ref, 'refs/tags/v') && 'master' || '' }} strategy: fail-fast: false @@ -89,12 +90,11 @@ jobs: - name: Download benchmarks if: matrix.settings.arch == 'amd64' && github.event_name == 'push' && github.ref_name == 'master' run: | - ./ci3/cache_download barretenberg-bench-results-$(git rev-parse HEAD).tar.gz - ./ci3/cache_download yarn-project-bench-results-$(git rev-parse HEAD).tar.gz - # ./ci3/cache_download yarn-project-p2p-bench-results-$(git rev-parse HEAD).tar.gz + # Note: sets SKIP_BB_BENCH in GITHUB_ENV for below step. 
+ ./ci.sh gh-bench - name: Store barretenberg benchmark result - if: matrix.settings.arch == 'amd64' && github.event_name == 'push' && github.ref_name == 'master' + if: matrix.settings.arch == 'amd64' && github.event_name == 'push' && github.ref_name == 'master' && env.SKIP_BB_BENCH != 'true' continue-on-error: true uses: benchmark-action/github-action-benchmark@4de1bed97a47495fc4c5404952da0499e31f5c29 with: diff --git a/.test_patterns.yml b/.test_patterns.yml index e27552067056..5a5d2ba4cdcd 100644 --- a/.test_patterns.yml +++ b/.test_patterns.yml @@ -160,8 +160,23 @@ tests: owners: - *leila - - regex: "spartan/bootstrap.sh test-local" - skip: true + - regex: "spartan/bootstrap.sh test-kind-upgrade-rollup-version" + owners: + - *adam + + - regex: "spartan/bootstrap.sh test-cli-upgrade-with-lock" + owners: + - *adam + + - regex: "spartan/bootstrap.sh test-kind-smoke" + owners: + - *adam + + - regex: "spartan/bootstrap.sh test-kind-transfer" + owners: + - *adam + + - regex: "spartan/bootstrap.sh test-prod-deployment" owners: - *adam diff --git a/.vscode/settings.json b/.vscode/settings.json index d07820ba2e36..0efc903622b9 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -181,5 +181,11 @@ "typescript.tsserver.maxTsServerMemory": 4096, "typescript.tsdk": "yarn-project/node_modules/typescript/lib", "markdown.extension.toc.levels": "2..6", - "noir.nargoPath": "${workspaceFolder}/noir/noir-repo/target/release/nargo" + "noir.nargoPath": "${workspaceFolder}/noir/noir-repo/target/release/nargo", + "auto-close-tag.activationOnLanguage": [ + "xml", + "markdown", + "HTML (EEx)", + "HTML (Eex)" + ] } diff --git a/aztec-up/bin/.aztec-run b/aztec-up/bin/.aztec-run index 53044ad1d960..ecdd336f518d 100755 --- a/aztec-up/bin/.aztec-run +++ b/aztec-up/bin/.aztec-run @@ -13,7 +13,8 @@ # Such stuff belongs in higher level e.g. `aztec` or `aztec-wallet` scripts. set -euo pipefail -CONTAINER_NAME=$1 +# Can be overriden by env var CONTAINER_NAME, e.g. 
to start two sandboxes. +CONTAINER_NAME=${CONTAINER_NAME:-$1} shift VERSION=${VERSION:-"latest"} diff --git a/barretenberg/README.md b/barretenberg/README.md index 91e0a0108d1e..7244d26377d8 100644 --- a/barretenberg/README.md +++ b/barretenberg/README.md @@ -4,8 +4,6 @@ > [!WARNING] > :warning: ** is a mirror-only repository, please only use . Do not use this for any purpose other than reference.** :warning: -![banner](../.github/img/bb_banner.png) - # Barretenberg Barretenberg (or `bb` for short) is an optimized elliptic curve library for the bn128 curve, and a PLONK SNARK prover. @@ -473,6 +471,11 @@ command script import ~/aztec-packages/barretenberg/cpp/scripts/lldb_format.py Now when you `print` things with e.g. `print bigfield_t.get_value()` or inspect in VSCode (if you opened the debug console and put in these commands) then you will get pretty-printing of these types. This can be expanded fairly easily with more types if needed. + +#### Debugging and profiling realistic ClientIVC flows + +To download realistic ClientIVC benchmark inputs from last master, use `./barretenberg/cpp/bootstrap.sh download_e2e_ivc_inputs` and run ClientIVC proving with --input runtime_stack on those inputs. By default, tries to pull from last master, but you can pass a historic commit as an argument. + #### Using Tracy to Profile Memory/CPU/Gate Counts Tracy is a tool that gives us an in-depth look at certain performance related metrics, including memory, CPU usage, time, and circuit gate counts. 
diff --git a/barretenberg/acir_tests/bench_acir_tests.sh b/barretenberg/acir_tests/bench_acir_tests.sh index 0db53d6b2d5d..70d94d34497f 100755 --- a/barretenberg/acir_tests/bench_acir_tests.sh +++ b/barretenberg/acir_tests/bench_acir_tests.sh @@ -3,7 +3,6 @@ set -e cd "$(dirname "$0")" -set -x TEST_NAMES=("$@") THREADS=(1 4 16 32 64) BENCHMARKS=$LOG_FILE diff --git a/barretenberg/bootstrap.sh b/barretenberg/bootstrap.sh index 93aedc96cb33..e268d056370a 100755 --- a/barretenberg/bootstrap.sh +++ b/barretenberg/bootstrap.sh @@ -5,24 +5,46 @@ source $(git rev-parse --show-toplevel)/ci3/source # Download ignition up front to ensure no race conditions at runtime. [ -n "${SKIP_BB_CRS:-}" ] || ./scripts/download_bb_crs.sh -./bbup/bootstrap.sh $@ -./cpp/bootstrap.sh $@ -./ts/bootstrap.sh $@ -./acir_tests/bootstrap.sh $@ +function bootstrap_all { + ./bbup/bootstrap.sh $@ + ./cpp/bootstrap.sh $@ + ./ts/bootstrap.sh $@ + ./acir_tests/bootstrap.sh $@ +} -cmd=${1:-} -if [ "$cmd" == "bench" ]; then - rm -rf bench-out && mkdir -p bench-out - ./scripts/combine_benchmarks.py \ - native ./cpp/bench-out/client_ivc_17_in_20_release.json \ - native ./cpp/bench-out/client_ivc_release.json \ - native ./cpp/bench-out/ultra_honk_release.json \ - wasm ./cpp/bench-out/client_ivc_wasm.json \ - wasm ./cpp/bench-out/ultra_honk_wasm.json \ - "" ./cpp/bench-out/client_ivc_op_count.json \ - "" ./cpp/bench-out/client_ivc_op_count_time.json \ - wasm ./acir_tests/bench-out/ultra_honk_rec_wasm_memory.txt \ +function hash { + cache_content_hash "^barretenberg" +} + +function bench { + rm -rf bench-out && mkdir -p bench-out + local hash=$(hash) + if cache_download barretenberg-bench-results-$hash.tar.gz; then + return + fi + bootstrap_all bench + ./scripts/combine_benchmarks.py \ + ./cpp/bench-out/*.json \ + ./acir_tests/bench-out/*.txt \ > ./bench-out/bb-bench.json + cache_upload barretenberg-bench-results-$hash.tar.gz ./bench-out/bb-bench.json +} + +cmd=${1:-} +[ -n "$cmd" ] && shift - cache_upload 
barretenberg-bench-results-$COMMIT_HASH.tar.gz ./bench-out/bb-bench.json -fi +case "$cmd" in + hash) + hash + ;; + ""|clean|ci|fast|test|test_cmds|release) + bootstrap_all $cmd $@ + ;; + bench) + bench + ;; + *) + echo "Unknown command: $cmd" + exit 1 + ;; +esac diff --git a/barretenberg/cpp/.gitignore b/barretenberg/cpp/.gitignore index 89bbad4d6b0f..af67ca20d4fe 100644 --- a/barretenberg/cpp/.gitignore +++ b/barretenberg/cpp/.gitignore @@ -8,6 +8,7 @@ srs_db/*/bn254_g* CMakeUserPresets.json .vscode/settings.json acir_tests +client-ivc-inputs-out # we may download go in scripts/collect_heap_information.sh go*.tar.gz barretenberg_modules.dot diff --git a/barretenberg/cpp/bootstrap.sh b/barretenberg/cpp/bootstrap.sh index d0286daccd77..cd78f7a435d8 100755 --- a/barretenberg/cpp/bootstrap.sh +++ b/barretenberg/cpp/bootstrap.sh @@ -7,6 +7,7 @@ cmd=${1:-} export preset=clang16-assert export pic_preset="clang16-pic" export hash=$(cache_content_hash .rebuild_patterns) +export capture_ivc_folder=../../yarn-project/end-to-end/private-flows-ivc-inputs-out # Injects version number into a given bb binary. # Means we don't actually need to rebuild bb to release a new version if code hasn't changed. @@ -173,7 +174,7 @@ function test_cmds { grep -v 'DISABLED_' | \ while read -r test; do echo -e "$hash barretenberg/cpp/scripts/run_test.sh $bin_name $test" - done + done || (echo "Failed to list tests in $bin" && exit 1) done } @@ -205,6 +206,10 @@ function bench { rm -rf bench-out && mkdir -p bench-out + # A bit pattern breaking, but the best code to instrument our private IVC flows exists in yarn-project, + # while the best code for benchmarking these IVC flows exists here. + # ../../yarn-project/end-to-end/bootstrap.sh generate_private_ivc_inputs + # Ultra honk. 
function ultra_honk_release { ./build/bin/ultra_honk_bench \ @@ -245,14 +250,44 @@ function bench { --benchmark_out=./bench-out/client_ivc_wasm.json \ --benchmark_filter="ClientIVCBench/Full/6$" } + function client_ivc_flow { + set -eu + local flow=$1 + local inputs_folder="$capture_ivc_folder/$flow" + local start=$(date +%s%N) + local maybe_allow_fail="false" + # TODO(AD) this should verify! + if [ "$flow" == "amm-add-liquidity" ]; then + maybe_allow_fail="true" + fi + mkdir -p "bench-out/$flow-proof-files" + ./build/bin/bb prove -o "bench-out/$flow-proof-files" -b "$inputs_folder/acir.msgpack" -w "$inputs_folder/witnesses.msgpack" --scheme client_ivc --input_type runtime_stack || $maybe_allow_fail + echo "$flow has proven." + local end=$(date +%s%N) + local elapsed_ns=$(( end - start )) + local elapsed_ms=$(( elapsed_ns / 1000000 )) + cat > "./bench-out/$flow-ivc.json" <; /** - * @brief A full proof for the IVC scheme containing a Mega proof showing correctness of the hiding circuit (which + * @brief A full proof for the IVC scheme containing a Mega proof showing correctness of the hiding circuit (which * recursive verified the last folding and decider proof) and a Goblin proof (translator VM, ECCVM and last merge * proof). * diff --git a/barretenberg/scripts/combine_benchmarks.py b/barretenberg/scripts/combine_benchmarks.py index 709aa04f0624..c0d1aa53dd66 100755 --- a/barretenberg/scripts/combine_benchmarks.py +++ b/barretenberg/scripts/combine_benchmarks.py @@ -3,70 +3,139 @@ import sys import re +# Counters to be used for extracting benchmark data from JSON files. 
TIME_COUNTERS_USED = ["commit(t)", "Goblin::merge(t)"] -def modify_benchmark_data(file_paths, prefixes): - combined_results = {"benchmarks": []} - memory_pattern = re.compile(r"\(mem: ([\d.]+)MiB\)") - - for file_path, prefix in zip(file_paths, prefixes): - with open(file_path, 'r') as file: - # if file is a txt, load as text - if file_path.endswith(".txt"): - last_memory = None - for line in reversed(file.readlines()): - match = memory_pattern.search(line) - if match: - last_memory = match.group(1) - break - if last_memory: - new_entry = { - "name": f"{prefix}UltraHonkVerifierWasmMemory", - "real_time": last_memory, - "time_unit": "MiB" - } - combined_results['benchmarks'].append(new_entry) - else: - print(f"Warning: No memory found in {file_path}") - else: - data = json.load(file) - # Modify benchmark names and extract specific data - for benchmark in data['benchmarks']: - # Prefix benchmark names - benchmark['name'] = f"{prefix}{benchmark['name']}" - benchmark['run_name'] = f"{prefix}{benchmark['run_name']}" - - if prefix != "": - combined_results['benchmarks'].append(benchmark) - # Isolate batch_mul_with_endomorphism - for counter in TIME_COUNTERS_USED: - if counter in benchmark: - new_entry = { - "name": f"{counter}", - "run_name": benchmark['run_name'], - "run_type": benchmark['run_type'], - "repetitions": benchmark['repetitions'], - "repetition_index": benchmark['repetition_index'], - "threads": benchmark['threads'], - "iterations": benchmark['iterations'], - "real_time": benchmark[counter], - "cpu_time": benchmark[counter], - "time_unit": "ns" - } - combined_results['benchmarks'].append(new_entry) +# field op weights based on these numbers captured by Kesha (nanoseconds) +# * cycle_waste : 0.5 +# * ff_addition : 3.8 +# * ff_from_montgomery : 19.1 +# * ff_invert : 7001.3 +# * ff_multiplication : 21.3 +# * ff_reduce : 5.1 +# * ff_sqr : 17.9 +# * ff_to_montgomery : 39.1 +# * parallel_for_field_element_addition : 376060.9 +# * 
projective_point_accidental_doubling : 347.6 +# * projective_point_addition : 348.6 +# * projective_point_doubling : 194.2 +# * scalar_multiplication : 50060.1 +# * sequential_copy : 3.3 - return combined_results +# Cody analyzed the following asm operations as not correlated with one another: +FIELD_OPS_WEIGHTS = { + "fr::asm_add_with_coarse_reduction": 3.8, + "fr::asm_conditional_negate": 3.8, + "fr::asm_mul_with_coarse_reduction": 21.3, + "fr::asm_self_add_with_coarse_reduction": 3.8, + "fr::asm_self_mul_with_coarse_reduction": 21.3, + "fr::asm_self_reduce_once": 3.8, + "fr::asm_self_sqr_with_coarse_reduction": 21.3, + "fr::asm_self_sub_with_coarse_reduction": 3.8, + "fr::asm_sqr_with_coarse_reduction": 21.3, +} + +MEMORY_PATTERN = re.compile(r"\(mem: ([\d.]+)MiB\)") + +def process_json_field_ops_weighted_sum(benchmark): + weighted_sum = 0 + for key, weight in FIELD_OPS_WEIGHTS.items(): + if key in benchmark: + count = int(benchmark[key]) + if count is not None: + # Calculate the weighted sum of field operations + weighted_sum += count * weight + return weighted_sum + +def extract_memory_from_text(file_path): + """ + Extracts the last memory value from a text file by searching in reverse order. + """ + with open(file_path, 'r') as file: + # Iterate over the file lines in reverse to get the last memory occurrence + for line in reversed(file.readlines()): + match = MEMORY_PATTERN.search(line) + if match: + return match.group(1) + return None + +def process_json_file(file_path, prefix): + """ + Processes a JSON file to prefix benchmark names and extract additional counter data. 
+ """ + # print to stderr + print(f"Processing JSON file: {file_path}", file=sys.stderr) + with open(file_path, 'r') as file: + data = json.load(file) -# Using command line arguments to get prefixes and file paths -if len(sys.argv) < 3 or len(sys.argv) % 2 != 1: - print("Usage: python script.py ...") - sys.exit(1) + results = [] + for benchmark in data['benchmarks']: + # Prefix the benchmark's name and run name + benchmark['name'] = f"{prefix}{benchmark['name']}" -prefixes = sys.argv[1::2] -file_paths = sys.argv[2::2] + # Include benchmark only if a prefix is provided. + if prefix != "": + results.append(benchmark) + + field_ops_heuristic = process_json_field_ops_weighted_sum(benchmark) + if field_ops_heuristic > 0: + # Add the field ops heuristic to the benchmark entry. + benchmark["field_ops_heuristic"] = field_ops_heuristic + results.append({ + "name": "field_ops_heuristic", + "real_time": field_ops_heuristic, + "time_unit": "ns" + }) + + # For each counter, if it exists in the benchmark, create a new entry. + for counter in TIME_COUNTERS_USED: + if counter in benchmark: + results.append({ + "name": f"{counter}", + "real_time": benchmark[counter], + "time_unit": "ns" + }) + return results + +def modify_benchmark_data(file_paths): + """ + Combines benchmark data from multiple files (both text and JSON) with associated prefixes. + """ + combined_results = {"benchmarks": []} + + for file_path in file_paths: + prefix = "" + # Historical name compatibility: + if "wasm" in file_path: + prefix = "wasm" + elif "release" in file_path: + prefix = "native" + elif "-ivc.json" in file_path: + prefix = "ivc-" + if file_path.endswith(".txt"): + # Process text files to extract memory data. 
+ memory_value = extract_memory_from_text(file_path) + if memory_value: + entry = { + "name": f"{prefix}UltraHonkVerifierWasmMemory", + "real_time": memory_value, + "time_unit": "MiB" + } + combined_results['benchmarks'].append(entry) + else: + print(f"Warning: No memory value found in {file_path}") + else: + # Process JSON files to update benchmark entries. + benchmarks = process_json_file(file_path, prefix) + combined_results['benchmarks'].extend(benchmarks) + return combined_results -final_data = modify_benchmark_data(file_paths, prefixes) +def main(): + file_paths = sys.argv[1::] + final_data = modify_benchmark_data(file_paths) -# Save the combined results to a file -print(json.dumps(final_data, indent=4)) + # Output the combined benchmark data as formatted JSON. + print(json.dumps(final_data, indent=4)) +if __name__ == "__main__": + main() diff --git a/barretenberg/ts/bootstrap.sh b/barretenberg/ts/bootstrap.sh index ee3bdc50092c..ea0c46dcffa2 100755 --- a/barretenberg/ts/bootstrap.sh +++ b/barretenberg/ts/bootstrap.sh @@ -59,7 +59,7 @@ case "$cmd" in echo "$hash" ;; "bench") - echo "ts/bootstrap.sh bench is empty" + # Empty handling just to make this command valid. ;; test|test_cmds|release) $cmd diff --git a/bootstrap.sh b/bootstrap.sh index b2162a8363c2..a46c562cf60e 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -236,7 +236,7 @@ function release { # + noir # + yarn-project => NPM publish to dist tag, version is our REF_NAME without a leading v. # aztec-up => upload scripts to prod if dist tag is latest - # docs => publish docs if dist tag is latest. TODO Link build in github release. + # docs, playground => publish if dist tag is latest. TODO Link build in github release. # release-image => push docker image to dist tag. # boxes/l1-contracts => mirror repo to branch equal to dist tag (master if latest). Also mirror to tag equal to REF_NAME. 
@@ -262,7 +262,7 @@ function release { release-image ) if [ $(arch) == arm64 ]; then - echo "Only deploying packages with platform-specific binaries on arm64." + echo "Only releasing packages with platform-specific binaries on arm64." projects=( barretenberg/cpp release-image diff --git a/ci.sh b/ci.sh index 55de3815bf94..44aa1f0a563c 100755 --- a/ci.sh +++ b/ci.sh @@ -239,6 +239,40 @@ case "$cmd" in "help"|"") print_usage ;; + "gh-bench") + export CI=1 + # Run benchmark logic for github actions. + bb_hash=$(barretenberg/bootstrap.sh hash) + yp_hash=$(yarn-project/bootstrap.sh hash) + seven_days=$((7 * 24 * 60 * 60)) # in seconds + + if [ "$bb_hash" == disabled-cache ] || [ "$yp_hash" == disabled-cache ]; then + echo "Error, can't publish benchmarks due to unstaged changes." + git status -s + exit 1 + fi + + # barretenberg benchmarks. + if [ "$(redis_getz last-publish-hash-bb)" == "$bb_hash" ]; then + echo "No changes since last master, skipping barretenberg benchmark publishing." + echo "SKIP_BB_BENCH=true" >> $GITHUB_ENV + else + cache_download barretenberg-bench-results-$bb_hash.tar.gz + seven_days=$((7 * 24 * 60 * 60)) # in seconds + echo "$bb_hash" | redis_setexz last-publish-hash-bb $seven_days + fi + + # yarn-project benchmarks. + if [ "$(redis_getz last-publish-hash-yp)" == "$yp_hash" ]; then + echo "No changes since last master, skipping yarn-project benchmark publishing." + echo "SKIP_YP_BENCH=true" >> $GITHUB_ENV + else + cache_download yarn-project-bench-results-$yp_hash.tar.gz + # TODO reenable + # ./cache_download yarn-project-p2p-bench-results-$(git rev-parse HEAD).tar.gz + echo "$yp_hash" | redis_setexz last-publish-hash-yp $seven_days + fi + ;; "uncached-tests") if [ -z "$CI_REDIS_AVAILABLE" ]; then echo "Not connected to CI redis." 
diff --git a/ci3/echo_header b/ci3/echo_header index 277e0334c35f..bf8c05953a02 100755 --- a/ci3/echo_header +++ b/ci3/echo_header @@ -1,5 +1,5 @@ #!/usr/bin/env bash set -eu -source $ci3/source_color +source $(dirname "$0")/source_color -echo -e "${purple}---${reset} ${blue}${bold}$@${reset} ${purple}---${reset}" \ No newline at end of file +echo -e "${purple}---${reset} ${blue}${bold}$@${reset} ${purple}---${reset}" diff --git a/ci3/run_test_cmd b/ci3/run_test_cmd index ffa7f0b86808..6dc92c6956a5 100755 --- a/ci3/run_test_cmd +++ b/ci3/run_test_cmd @@ -98,11 +98,15 @@ function flake { "text": "${slack_uids% }: Test flaked on *$REF_NAME*: \`$test_cmd\` http://ci.aztec-labs.com/$log_key" } EOF - curl -X POST https://slack.com/api/chat.postMessage \ - -H "Authorization: Bearer $SLACK_BOT_TOKEN" \ - -H "Content-type: application/json" \ - --data "$data" &>/dev/null - + if [ -n "${SLACK_BOT_TOKEN:-}" ]; then + curl -X POST https://slack.com/api/chat.postMessage \ + -H "Authorization: Bearer $SLACK_BOT_TOKEN" \ + -H "Content-type: application/json" \ + --data "$data" &>/dev/null + else + # If we can't post to slack, print to console + echo -e "${red}FLAKED${reset}${log_info:-}: $test_cmd (${SECONDS}s) (code: $code)" + fi exit } diff --git a/noir-projects/noir-contracts/bootstrap.sh b/noir-projects/noir-contracts/bootstrap.sh index 790043591cdf..1ae4dd733e6e 100755 --- a/noir-projects/noir-contracts/bootstrap.sh +++ b/noir-projects/noir-contracts/bootstrap.sh @@ -31,7 +31,7 @@ export BB_HASH=$(cache_content_hash ../../barretenberg/cpp/.rebuild_patterns) export tmp_dir=./target/tmp # Create our tmp working directory, ensure it's removed on exit. -function on_exit() { +function on_exit { rm -rf $tmp_dir rm -f joblog.txt } @@ -46,7 +46,7 @@ export PARALLEL_FLAGS="-j${PARALLELISM:-16} --halt now,fail=1 --memsuspend $(mem # stdout receives the function json with the vk added (if private). 
# The function is exported and called by a sub-shell in parallel, so we must "set -eu" etc.. # If debugging, a set -x at the start can help. -function process_function() { +function process_function { set -euo pipefail local func name bytecode_b64 hash vk diff --git a/noir-projects/noir-protocol-circuits/bootstrap.sh b/noir-projects/noir-protocol-circuits/bootstrap.sh index 7bc5f830fae1..3c0383e11ccb 100755 --- a/noir-projects/noir-protocol-circuits/bootstrap.sh +++ b/noir-projects/noir-protocol-circuits/bootstrap.sh @@ -48,7 +48,7 @@ rollup_honk_regex=$(IFS="|"; echo "${rollup_honk_patterns[*]}") keccak_honk_regex=rollup_root verifier_generate_regex=rollup_root -function on_exit() { +function on_exit { rm -f joblog.txt } trap on_exit EXIT diff --git a/noir/bootstrap.sh b/noir/bootstrap.sh index 4e7321af5dd9..bbf3fae55b6f 100755 --- a/noir/bootstrap.sh +++ b/noir/bootstrap.sh @@ -43,15 +43,16 @@ export RUSTFLAGS="-Dwarnings" # Builds nargo, acvm and profiler binaries. function build_native { set -euo pipefail - cd noir-repo if cache_download noir-$hash.tar.gz; then return fi + cd noir-repo parallel --tag --line-buffer --halt now,fail=1 ::: \ "cargo fmt --all --check" \ "cargo build --locked --release --target-dir target" \ "cargo clippy --target-dir target/clippy --workspace --locked --release" - cache_upload noir-$hash.tar.gz target/release/nargo target/release/acvm target/release/noir-profiler + cd .. + cache_upload noir-$hash.tar.gz noir-repo/target/release/{nargo,acvm,noir-profiler} } # Builds js packages. 
diff --git a/noir/postcheckout.sh b/noir/postcheckout.sh index 7394debab8e9..af054caaae85 100755 --- a/noir/postcheckout.sh +++ b/noir/postcheckout.sh @@ -5,7 +5,7 @@ set -euo pipefail cd $(dirname $0) -is_branch=$3 +is_branch=${3:-} if [ "$is_branch" == "1" ] && scripts/sync.sh needs-patch; then echo "Warning: the noir-repo has outstanding commits that need to be put in a patch file" diff --git a/playground/bootstrap.sh b/playground/bootstrap.sh index 69d8efdd2667..bfe18c60979f 100755 --- a/playground/bootstrap.sh +++ b/playground/bootstrap.sh @@ -3,11 +3,7 @@ source $(git rev-parse --show-toplevel)/ci3/source_bootstrap cmd=${1:-} -hash=$(cache_content_hash \ - .rebuild_patterns \ - ../noir/.rebuild_patterns \ - ../{avm-transpiler,noir-projects,l1-contracts,yarn-project}/.rebuild_patterns \ - ../barretenberg/*/.rebuild_patterns) +hash=$(hash_str $(cache_content_hash .rebuild_patterns) $(../yarn-project/bootstrap.sh hash)) function build { echo_header "playground build" diff --git a/spartan/bootstrap.sh b/spartan/bootstrap.sh index a7a0cd8eb7c1..bf5b878805a4 100755 --- a/spartan/bootstrap.sh +++ b/spartan/bootstrap.sh @@ -49,7 +49,6 @@ function gke { } function test_cmds { - echo "$hash timeout -v 20m ./spartan/bootstrap.sh test-local" if [ "$(arch)" == "arm64" ]; then # Currently maddiaa/eth2-testnet-genesis is not published for arm64. Skip KIND tests. return @@ -143,10 +142,6 @@ case "$cmd" in FRESH_INSTALL=${FRESH_INSTALL:-true} INSTALL_METRICS=false \ ./scripts/test_kind.sh src/spartan/upgrade_via_cli.test.ts 1-validators.yaml upgrade-via-cli${NAME_POSTFIX:-} ;; - "test-local") - # Isolate network stack in docker. 
- docker_isolate ../scripts/run_native_testnet.sh -i -val 3 - ;; *) echo "Unknown command: $cmd" exit 1 diff --git a/spartan/scripts/deploy_spartan.sh b/spartan/scripts/deploy_spartan.sh index b30b239b7ffd..c64e6d658cf4 100755 --- a/spartan/scripts/deploy_spartan.sh +++ b/spartan/scripts/deploy_spartan.sh @@ -30,7 +30,7 @@ if [[ "$current_context" =~ ^kind- ]]; then exit 1 fi -function cleanup() { +function cleanup { set +x # kill everything in our process group except our process trap - SIGTERM && kill $(pgrep -g $$ | grep -v $$) $(jobs -p) &>/dev/null || true diff --git a/yarn-project/aztec.js/src/api/contract.ts b/yarn-project/aztec.js/src/api/contract.ts index e128adf0e465..89ae3d5392a9 100644 --- a/yarn-project/aztec.js/src/api/contract.ts +++ b/yarn-project/aztec.js/src/api/contract.ts @@ -36,11 +36,9 @@ * @packageDocumentation */ export { Contract } from '../contract/contract.js'; -export { - ContractFunctionInteraction, - type ProfileResult, - type SendMethodOptions, -} from '../contract/contract_function_interaction.js'; +export { ContractFunctionInteraction, type SendMethodOptions } from '../contract/contract_function_interaction.js'; + +export { TxProfileResult } from '@aztec/stdlib/tx'; export { DefaultWaitOpts, SentTx, type WaitOpts } from '../contract/sent_tx.js'; export { ContractBase, diff --git a/yarn-project/aztec.js/src/contract/contract_function_interaction.ts b/yarn-project/aztec.js/src/contract/contract_function_interaction.ts index 90f04ed86780..92e2ef5ceada 100644 --- a/yarn-project/aztec.js/src/contract/contract_function_interaction.ts +++ b/yarn-project/aztec.js/src/contract/contract_function_interaction.ts @@ -1,7 +1,6 @@ import { type FunctionAbi, FunctionSelector, FunctionType, decodeFromAbi, encodeArguments } from '@aztec/stdlib/abi'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import type { PrivateKernelProverProfileResult } from '@aztec/stdlib/kernel'; -import type { TxExecutionRequest } from '@aztec/stdlib/tx'; 
+import type { TxExecutionRequest, TxProfileResult } from '@aztec/stdlib/tx'; import type { Wallet } from '../account/wallet.js'; import type { ExecutionRequestInit } from '../entrypoint/entrypoint.js'; @@ -10,6 +9,18 @@ import { BaseContractInteraction, type SendMethodOptions } from './base_contract export type { SendMethodOptions }; +/** + * Represents the options for profiling a contract function interaction. + * Allows specifying the address from which the view method should be called. + * Disregarded for simulation of public functions + */ +export type ProfileMethodOptions = Pick & { + /** Whether to return gates information or the bytecode/witnesses. */ + profileMode: 'gates' | 'execution-steps' | 'full'; + /** The sender's Aztec address. */ + from?: AztecAddress; +}; + /** * Represents the options for simulating a contract function interaction. * Allows specifying the address from which the view method should be called. @@ -24,14 +35,6 @@ export type SimulateMethodOptions = Pick & { skipFeeEnforcement?: boolean; }; -/** - * The result of a profile() call. - */ -export type ProfileResult = PrivateKernelProverProfileResult & { - /** The result of the transaction as returned by the contract function. */ - returnValues: any; -}; - /** * This is the class that is returned when calling e.g. `contract.methods.myMethod(arg0, arg1)`. * It contains available interactions one can call on a method, including view. @@ -154,30 +157,12 @@ export class ContractFunctionInteraction extends BaseContractInteraction { * * @returns An object containing the function return value and profile result. 
*/ - public async simulateWithProfile(options: SimulateMethodOptions = {}): Promise { + public async profile(options: ProfileMethodOptions = { profileMode: 'gates' }): Promise { if (this.functionDao.functionType == FunctionType.UNCONSTRAINED) { throw new Error("Can't profile an unconstrained function."); } const txRequest = await this.create({ fee: options.fee }); - const simulatedTx = await this.wallet.simulateTx( - txRequest, - true, - options?.from, - options?.skipTxValidation, - undefined, - true, - ); - - const rawReturnValues = - this.functionDao.functionType == FunctionType.PRIVATE - ? simulatedTx.getPrivateReturnValues().nested?.[0].values - : simulatedTx.getPublicReturnValues()?.[0].values; - const rawReturnValuesDecoded = rawReturnValues ? decodeFromAbi(this.functionDao.returnTypes, rawReturnValues) : []; - - return { - returnValues: rawReturnValuesDecoded, - gateCounts: simulatedTx.profileResult!.gateCounts, - }; + return await this.wallet.profileTx(txRequest, options.profileMode, options?.from); } } diff --git a/yarn-project/aztec.js/src/wallet/base_wallet.ts b/yarn-project/aztec.js/src/wallet/base_wallet.ts index c6ef5b918609..8b72030e07d0 100644 --- a/yarn-project/aztec.js/src/wallet/base_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/base_wallet.ts @@ -23,6 +23,7 @@ import type { Tx, TxExecutionRequest, TxHash, + TxProfileResult, TxProvingResult, TxReceipt, TxSimulationResult, @@ -97,23 +98,21 @@ export abstract class BaseWallet implements Wallet { proveTx(txRequest: TxExecutionRequest, privateExecutionResult: PrivateExecutionResult): Promise { return this.pxe.proveTx(txRequest, privateExecutionResult); } + profileTx( + txRequest: TxExecutionRequest, + profileMode: 'gates' | 'execution-steps' | 'full', + msgSender?: AztecAddress, + ): Promise { + return this.pxe.profileTx(txRequest, profileMode, msgSender); + } simulateTx( txRequest: TxExecutionRequest, simulatePublic: boolean, msgSender?: AztecAddress, skipTxValidation?: boolean, 
skipFeeEnforcement?: boolean, - profile?: boolean, ): Promise { - return this.pxe.simulateTx( - txRequest, - simulatePublic, - msgSender, - skipTxValidation, - skipFeeEnforcement, - profile, - this.scopes, - ); + return this.pxe.simulateTx(txRequest, simulatePublic, msgSender, skipTxValidation, skipFeeEnforcement, this.scopes); } sendTx(tx: Tx): Promise { return this.pxe.sendTx(tx); diff --git a/yarn-project/bb-prover/src/prover/bb_native_private_kernel_prover.ts b/yarn-project/bb-prover/src/prover/bb_native_private_kernel_prover.ts index 806d76755a8b..6bcb42c89188 100644 --- a/yarn-project/bb-prover/src/prover/bb_native_private_kernel_prover.ts +++ b/yarn-project/bb-prover/src/prover/bb_native_private_kernel_prover.ts @@ -2,8 +2,8 @@ import { runInDirectory } from '@aztec/foundation/fs'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { serializeWitness } from '@aztec/noir-noirc_abi'; import { BundleArtifactProvider } from '@aztec/noir-protocol-circuits-types/client/bundle'; -import type { WitnessMap } from '@aztec/noir-types'; import type { SimulationProvider } from '@aztec/simulator/server'; +import type { PrivateExecutionStep } from '@aztec/stdlib/kernel'; import type { ClientIvcProof } from '@aztec/stdlib/proofs'; import { encode } from '@msgpack/msgpack'; @@ -40,18 +40,26 @@ export class BBNativePrivateKernelProver extends BBPrivateKernelProver { ); } + // TODO(#7371): This is duplicated. + // Longer term we won't use this hacked together msgpack format + // Leaving duplicated as this eventually bb will provide a serialization + // helper for passing to a generic msgpack RPC endpoint. 
+ private async _createClientIvcProofFiles(directory: string, executionSteps: PrivateExecutionStep[]) { + const acirPath = path.join(directory, 'acir.msgpack'); + const witnessPath = path.join(directory, 'witnesses.msgpack'); + await fs.writeFile(acirPath, encode(executionSteps.map(map => map.bytecode))); + await fs.writeFile(witnessPath, encode(executionSteps.map(map => serializeWitness(map.witness)))); + return { + acirPath, + witnessPath, + }; + } + private async _createClientIvcProof( directory: string, - acirs: Buffer[], - witnessStack: WitnessMap[], + executionSteps: PrivateExecutionStep[], ): Promise { - // TODO(#7371): Longer term we won't use this hacked together msgpack format - // and instead properly create the bincode serialization from rust - await fs.writeFile(path.join(directory, 'acir.msgpack'), encode(acirs)); - await fs.writeFile( - path.join(directory, 'witnesses.msgpack'), - encode(witnessStack.map(map => serializeWitness(map))), - ); + await this._createClientIvcProofFiles(directory, executionSteps); const provingResult = await executeBbClientIvcProof( this.bbBinaryPath, directory, @@ -75,10 +83,10 @@ export class BBNativePrivateKernelProver extends BBPrivateKernelProver { return proof; } - public override async createClientIvcProof(acirs: Buffer[], witnessStack: WitnessMap[]): Promise { + public override async createClientIvcProof(executionSteps: PrivateExecutionStep[]): Promise { this.log.info(`Generating Client IVC proof`); const operation = async (directory: string) => { - return await this._createClientIvcProof(directory, acirs, witnessStack); + return await this._createClientIvcProof(directory, executionSteps); }; return await this.runInDirectory(operation); } diff --git a/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts b/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts index b6acc329b487..5eb576813221 100644 --- a/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts +++ 
b/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts @@ -19,6 +19,7 @@ import type { Abi, WitnessMap } from '@aztec/noir-types'; import type { SimulationProvider } from '@aztec/simulator/client'; import type { PrivateKernelProver } from '@aztec/stdlib/interfaces/client'; import type { + PrivateExecutionStep, PrivateKernelCircuitPublicInputs, PrivateKernelInitCircuitPrivateInputs, PrivateKernelInnerCircuitPrivateInputs, @@ -238,7 +239,7 @@ export abstract class BBPrivateKernelProver implements PrivateKernelProver { return kernelProofOutput; } - public createClientIvcProof(_acirs: Buffer[], _witnessStack: WitnessMap[]): Promise { + public createClientIvcProof(_executionSteps: PrivateExecutionStep[]): Promise { throw new Error('Not implemented'); } diff --git a/yarn-project/bb-prover/src/wasm/bb_wasm_private_kernel_prover.ts b/yarn-project/bb-prover/src/wasm/bb_wasm_private_kernel_prover.ts index 01906f424c4c..49d416feba06 100644 --- a/yarn-project/bb-prover/src/wasm/bb_wasm_private_kernel_prover.ts +++ b/yarn-project/bb-prover/src/wasm/bb_wasm_private_kernel_prover.ts @@ -3,8 +3,8 @@ import { createLogger } from '@aztec/foundation/log'; import { Timer } from '@aztec/foundation/timer'; import { serializeWitness } from '@aztec/noir-noirc_abi'; import type { ArtifactProvider } from '@aztec/noir-protocol-circuits-types/types'; -import type { WitnessMap } from '@aztec/noir-types'; import type { SimulationProvider } from '@aztec/simulator/client'; +import type { PrivateExecutionStep } from '@aztec/stdlib/kernel'; import { ClientIvcProof } from '@aztec/stdlib/proofs'; import { ungzip } from 'pako'; @@ -21,15 +21,15 @@ export abstract class BBWASMPrivateKernelProver extends BBPrivateKernelProver { super(artifactProvider, simulationProvider, log); } - public override async createClientIvcProof(acirs: Buffer[], witnessStack: WitnessMap[]): Promise { + public override async createClientIvcProof(executionSteps: PrivateExecutionStep[]): Promise { const timer = new 
Timer(); this.log.info(`Generating ClientIVC proof...`); const backend = new AztecClientBackend( - acirs.map(acir => ungzip(acir)), + executionSteps.map(step => ungzip(step.bytecode)), { threads: this.threads, logger: this.log.verbose, wasmPath: process.env.BB_WASM_PATH }, ); - const [proof, vk] = await backend.prove(witnessStack.map(witnessMap => ungzip(serializeWitness(witnessMap)))); + const [proof, vk] = await backend.prove(executionSteps.map(step => ungzip(serializeWitness(step.witness)))); await backend.destroy(); this.log.info(`Generated ClientIVC proof`, { eventName: 'client-ivc-proof-generation', diff --git a/yarn-project/bootstrap.sh b/yarn-project/bootstrap.sh index c68706741ee9..24a25ce49f49 100755 --- a/yarn-project/bootstrap.sh +++ b/yarn-project/bootstrap.sh @@ -31,12 +31,18 @@ function get_projects { } function format { + local arg=${1:-"-w"} find ./*/src -type f -regex '.*\.\(json\|js\|mjs\|cjs\|ts\)$' | \ - parallel -N30 ./node_modules/.bin/prettier --loglevel warn --check + parallel -N30 ./node_modules/.bin/prettier --loglevel warn "$arg" } function lint { - get_projects | parallel "cd {} && ../node_modules/.bin/eslint $@ --cache ./src" + local arg="--fix" + if [ "${1-}" == "--check" ]; then + arg="" + shift 1 + fi + get_projects | parallel "cd {} && ../node_modules/.bin/eslint $@ --cache $arg ./src" } function compile_all { @@ -65,10 +71,10 @@ function compile_all { get_projects | compile_project - cmds=(format) + cmds=('format --check') if [ "${TYPECHECK:-0}" -eq 1 ] || [ "${CI:-0}" -eq 1 ]; then # Fully type check and lint. - cmds+=('yarn tsc -b --emitDeclarationOnly && lint') + cmds+=('yarn tsc -b --emitDeclarationOnly && lint --check') else # We just need the type declarations required for downstream consumers. cmds+=('cd aztec.js && yarn tsc -b --emitDeclarationOnly') @@ -102,11 +108,11 @@ function test_cmds { local hash=$(hash) # These need isolation due to network stack usage (p2p, anvil, etc). 
for test in {prover-node,p2p,ethereum,aztec}/src/**/*.test.ts; do - if [[ ! "$test" =~ testbench ]]; then - echo "$hash ISOLATE=1 yarn-project/scripts/run_test.sh $test" - else + if [[ "$test" =~ testbench ]]; then # Testbench runs require more memory and CPU. echo "$hash ISOLATE=1 CPUS=18 MEM=12g yarn-project/scripts/run_test.sh $test" + else + echo "$hash ISOLATE=1 yarn-project/scripts/run_test.sh $test" fi done @@ -196,8 +202,8 @@ case "$cmd" in get_projects | compile_project fi ;; - "lint") - lint "$@" + lint|format) + $cmd "$@" ;; test|test_cmds|hash|release|format) $cmd diff --git a/yarn-project/cli-wallet/package.json b/yarn-project/cli-wallet/package.json index d36ffdeed54a..f3631fbaa461 100644 --- a/yarn-project/cli-wallet/package.json +++ b/yarn-project/cli-wallet/package.json @@ -72,8 +72,10 @@ "@aztec/foundation": "workspace:^", "@aztec/kv-store": "workspace:^", "@aztec/noir-contracts.js": "workspace:^", + "@aztec/noir-noirc_abi": "workspace:^", "@aztec/pxe": "workspace:^", "@aztec/stdlib": "workspace:^", + "@msgpack/msgpack": "^3.0.0-beta2", "commander": "^12.1.0", "inquirer": "^10.1.8", "source-map-support": "^0.5.21", diff --git a/yarn-project/cli-wallet/src/cmds/index.ts b/yarn-project/cli-wallet/src/cmds/index.ts index 12a4d434cd04..9a89143eaa96 100644 --- a/yarn-project/cli-wallet/src/cmds/index.ts +++ b/yarn-project/cli-wallet/src/cmds/index.ts @@ -36,7 +36,7 @@ import { createArgsOption, createArtifactOption, createContractAddressOption, - createProfileOption, + createDebugExecutionStepsDirOption, createTypeOption, integerArgParser, parseGasFees, @@ -316,7 +316,6 @@ export function injectCommands( createSecretKeyOption("The sender's secret key", !db, sk => aliasedSecretKeyParser(sk, db)).conflicts('account'), ) .addOption(createAccountOption('Alias or address of the account to simulate from', !db, db)) - .addOption(createProfileOption()) .action(async (functionName, _options, command) => { const { simulate } = await import('./simulate.js'); 
const options = command.optsWithGlobals(); @@ -327,14 +326,46 @@ export function injectCommands( from: parsedFromAddress, rpcUrl, secretKey, - profile, } = options; const client = pxeWrapper?.getPXE() ?? (await createCompatibleClient(rpcUrl, debugLogger)); const account = await createOrRetrieveAccount(client, parsedFromAddress, db, secretKey); const wallet = await getWalletWithScopes(account, db); const artifactPath = await artifactPathFromPromiseOrAlias(artifactPathPromise, contractAddress, db); - await simulate(wallet, functionName, args, artifactPath, contractAddress, profile, log); + await simulate(wallet, functionName, args, artifactPath, contractAddress, log); + }); + + program + .command('profile') + .description('Profiles a private function by counting the unconditional operations in its execution steps') + .argument('', 'Name of function to simulate') + .addOption(pxeOption) + .addOption(createArgsOption(false, db)) + .addOption(createContractAddressOption(db)) + .addOption(createArtifactOption(db)) + .addOption(createDebugExecutionStepsDirOption()) + .addOption( + createSecretKeyOption("The sender's secret key", !db, sk => aliasedSecretKeyParser(sk, db)).conflicts('account'), + ) + .addOption(createAccountOption('Alias or address of the account to simulate from', !db, db)) + .action(async (functionName, _options, command) => { + const { profile } = await import('./profile.js'); + const options = command.optsWithGlobals(); + const { + args, + contractArtifact: artifactPathPromise, + contractAddress, + from: parsedFromAddress, + rpcUrl, + secretKey, + debugExecutionStepsDir, + } = options; + + const client = pxeWrapper?.getPXE() ?? 
(await createCompatibleClient(rpcUrl, debugLogger)); + const account = await createOrRetrieveAccount(client, parsedFromAddress, db, secretKey); + const wallet = await getWalletWithScopes(account, db); + const artifactPath = await artifactPathFromPromiseOrAlias(artifactPathPromise, contractAddress, db); + await profile(wallet, functionName, args, artifactPath, contractAddress, debugExecutionStepsDir, log); }); program diff --git a/yarn-project/cli-wallet/src/cmds/profile.ts b/yarn-project/cli-wallet/src/cmds/profile.ts new file mode 100644 index 000000000000..f4c686e87040 --- /dev/null +++ b/yarn-project/cli-wallet/src/cmds/profile.ts @@ -0,0 +1,70 @@ +import { type AccountWalletWithSecretKey, type AztecAddress, Contract } from '@aztec/aztec.js'; +import { prepTx } from '@aztec/cli/utils'; +import type { LogFn } from '@aztec/foundation/log'; +import { serializeWitness } from '@aztec/noir-noirc_abi'; +import type { PrivateExecutionStep } from '@aztec/stdlib/kernel'; +import type { TxProfileResult } from '@aztec/stdlib/tx'; + +import { encode } from '@msgpack/msgpack'; +import { promises as fs } from 'fs'; +import path from 'path'; +import { format } from 'util'; + +function printProfileResult(result: TxProfileResult, log: LogFn) { + // TODO(AD): this is a bit misleading - the maximum gate count of any piece is as important + // as the total gate count. We should probably print both. + log(format('\nGate count per circuit:')); + let acc = 0; + result.executionSteps.forEach(r => { + acc += r.gateCount!; + log( + format( + ' ', + r.functionName.padEnd(50), + 'Gates:', + r.gateCount!.toLocaleString(), + '\tSubtotal:', + acc.toLocaleString(), + ), + ); + }); + log(format('\nTotal gates:', acc.toLocaleString())); +} + +// TODO(#7371): This is duplicated. +// Longer term we won't use this hacked together msgpack format +// Leaving duplicated as this eventually bb will provide a serialization +// helper for passing to a generic msgpack RPC endpoint. 
+async function _createClientIvcProofFiles(directory: string, executionSteps: PrivateExecutionStep[]) { + const acirPath = path.join(directory, 'acir.msgpack'); + const witnessPath = path.join(directory, 'witnesses.msgpack'); + await fs.writeFile(acirPath, encode(executionSteps.map(map => map.bytecode))); + await fs.writeFile(witnessPath, encode(executionSteps.map(map => serializeWitness(map.witness)))); + return { + acirPath, + witnessPath, + }; +} + +export async function profile( + wallet: AccountWalletWithSecretKey, + functionName: string, + functionArgsIn: any[], + contractArtifactPath: string, + contractAddress: AztecAddress, + debugOutputPath: string | undefined, + log: LogFn, +) { + const profileMode = debugOutputPath ? ('full' as const) : ('gates' as const); + const { functionArgs, contractArtifact } = await prepTx(contractArtifactPath, functionName, functionArgsIn, log); + + const contract = await Contract.at(contractAddress, contractArtifact, wallet); + const call = contract.methods[functionName](...functionArgs); + + const result = await call.profile({ profileMode }); + printProfileResult(result, log); + if (debugOutputPath) { + log(`Debug output written to ${debugOutputPath} (witnesses.msgpack and acir.msgpack)`); + await _createClientIvcProofFiles(debugOutputPath, result.executionSteps); + } +} diff --git a/yarn-project/cli-wallet/src/cmds/simulate.ts b/yarn-project/cli-wallet/src/cmds/simulate.ts index b26ff4b7e825..589778bb500e 100644 --- a/yarn-project/cli-wallet/src/cmds/simulate.ts +++ b/yarn-project/cli-wallet/src/cmds/simulate.ts @@ -1,30 +1,15 @@ -import { type AccountWalletWithSecretKey, type AztecAddress, Contract, type ProfileResult } from '@aztec/aztec.js'; +import { type AccountWalletWithSecretKey, type AztecAddress, Contract } from '@aztec/aztec.js'; import { prepTx } from '@aztec/cli/utils'; import type { LogFn } from '@aztec/foundation/log'; import { format } from 'util'; -function printProfileResult(result: ProfileResult, log: LogFn) 
{ - log(format('\nSimulation result:')); - log(format('Return value:', JSON.stringify(result.returnValues, null, 2))); - - log(format('\nGate count per circuit:')); - let acc = 0; - result.gateCounts.forEach(r => { - acc += r.gateCount; - log(format(' ', r.circuitName.padEnd(50), 'Gates:', r.gateCount.toLocaleString(), '\tAcc:', acc.toLocaleString())); - }); - - log(format('\nTotal gates:', acc.toLocaleString())); -} - export async function simulate( wallet: AccountWalletWithSecretKey, functionName: string, functionArgsIn: any[], contractArtifactPath: string, contractAddress: AztecAddress, - profile: boolean, log: LogFn, ) { const { functionArgs, contractArtifact } = await prepTx(contractArtifactPath, functionName, functionArgsIn, log); @@ -32,11 +17,6 @@ export async function simulate( const contract = await Contract.at(contractAddress, contractArtifact, wallet); const call = contract.methods[functionName](...functionArgs); - if (profile) { - const result = await call.simulateWithProfile(); - printProfileResult(result, log); - } else { - const result = await call.simulate(); - log(format('\nSimulation result: ', result, '\n')); - } + const result = await call.simulate(); + log(format('\nSimulation result: ', result, '\n')); } diff --git a/yarn-project/cli-wallet/src/utils/options/options.ts b/yarn-project/cli-wallet/src/utils/options/options.ts index f9aff03c144d..b4a6b5a2a1a6 100644 --- a/yarn-project/cli-wallet/src/utils/options/options.ts +++ b/yarn-project/cli-wallet/src/utils/options/options.ts @@ -103,6 +103,13 @@ export function createContractAddressOption(db?: WalletDB) { .makeOptionMandatory(true); } +export function createDebugExecutionStepsDirOption() { + return new Option( + '--debug-execution-steps-dir
', + 'Directory to write execution step artifacts for bb profiling/debugging.', + ).makeOptionMandatory(false); +} + export function artifactPathParser(filePath: string, db?: WalletDB) { if (filePath.includes('@')) { const [pkg, contractName] = filePath.split('@'); @@ -140,13 +147,6 @@ export function createArtifactOption(db?: WalletDB) { .makeOptionMandatory(false); } -export function createProfileOption() { - return new Option( - '-p, --profile', - 'Run the real prover and get the gate count for each function in the transaction.', - ).default(false); -} - async function contractArtifactFromWorkspace(pkg?: string, contractName?: string) { const cwd = process.cwd(); try { diff --git a/yarn-project/cli-wallet/test/flows/basic.sh b/yarn-project/cli-wallet/test/flows/basic.sh index de95ba79a4d4..8cce03a07f53 100755 --- a/yarn-project/cli-wallet/test/flows/basic.sh +++ b/yarn-project/cli-wallet/test/flows/basic.sh @@ -1,6 +1,6 @@ #!/bin/bash -set -e -source ../utils/setup.sh +source $(git rev-parse --show-toplevel)/ci3/source +source shared/setup.sh test_title "Basic flow" diff --git a/yarn-project/cli-wallet/test/flows/create_account_pay_native.sh b/yarn-project/cli-wallet/test/flows/create_account_pay_native.sh index ca6d49eb5db0..c2fc0d290058 100755 --- a/yarn-project/cli-wallet/test/flows/create_account_pay_native.sh +++ b/yarn-project/cli-wallet/test/flows/create_account_pay_native.sh @@ -1,6 +1,7 @@ #!/bin/bash -set -e -source ../utils/setup.sh + +source $(git rev-parse --show-toplevel)/ci3/source +source shared/setup.sh test_title "Create an account and deploy using native fee payment with bridging" diff --git a/yarn-project/cli-wallet/test/flows/no_alias.sh b/yarn-project/cli-wallet/test/flows/no_alias.sh index 22bbf0ddac94..b55479d017d1 100755 --- a/yarn-project/cli-wallet/test/flows/no_alias.sh +++ b/yarn-project/cli-wallet/test/flows/no_alias.sh @@ -1,6 +1,6 @@ #!/bin/bash -set -e -source ../utils/setup.sh +source $(git rev-parse --show-toplevel)/ci3/source 
+source shared/setup.sh test_title "Basic flow, no aliases" diff --git a/yarn-project/cli-wallet/test/flows/private_authwit_transfer.sh b/yarn-project/cli-wallet/test/flows/private_authwit_transfer.sh index 1c6f9c3a0495..0359cc38c19c 100755 --- a/yarn-project/cli-wallet/test/flows/private_authwit_transfer.sh +++ b/yarn-project/cli-wallet/test/flows/private_authwit_transfer.sh @@ -1,15 +1,15 @@ #!/bin/bash -set -e -source ../utils/setup.sh +source $(git rev-parse --show-toplevel)/ci3/source +source shared/setup.sh test_title "Private transfer on behalf of other" MINT_AMOUNT=42 TRANSFER_AMOUNT=21 -source $TEST_FOLDER/shared/deploy_main_account_and_token.sh -source $TEST_FOLDER/shared/mint_to_private.sh $MINT_AMOUNT main -source $TEST_FOLDER/shared/create_funded_account.sh operator +source $flows/shared/deploy_main_account_and_token.sh +source $flows/shared/mint_to_private.sh $MINT_AMOUNT main +source $flows/shared/create_funded_account.sh operator aztec-wallet create-secret -a auth_nonce aztec-wallet create-authwit transfer_in_private operator -ca token --args accounts:main accounts:operator $TRANSFER_AMOUNT secrets:auth_nonce -f main diff --git a/yarn-project/cli-wallet/test/flows/private_transfer.sh b/yarn-project/cli-wallet/test/flows/private_transfer.sh index 01fa122cd8a0..fd7f252a7216 100755 --- a/yarn-project/cli-wallet/test/flows/private_transfer.sh +++ b/yarn-project/cli-wallet/test/flows/private_transfer.sh @@ -1,14 +1,14 @@ #!/bin/bash -set -e -source ../utils/setup.sh +source $(git rev-parse --show-toplevel)/ci3/source +source shared/setup.sh test_title "Private transfer" MINT_AMOUNT=42 TRANSFER_AMOUNT=21 -source $TEST_FOLDER/shared/deploy_main_account_and_token.sh -source $TEST_FOLDER/shared/mint_to_private.sh $MINT_AMOUNT main +source $flows/shared/deploy_main_account_and_token.sh +source $flows/shared/mint_to_private.sh $MINT_AMOUNT main aztec-wallet create-account -a recipient --register-only diff --git a/yarn-project/cli-wallet/test/flows/profile.sh 
b/yarn-project/cli-wallet/test/flows/profile.sh index 9c43bfe2812f..c0b706a7f453 100755 --- a/yarn-project/cli-wallet/test/flows/profile.sh +++ b/yarn-project/cli-wallet/test/flows/profile.sh @@ -1,34 +1,42 @@ #!/bin/bash -set -e -source ../utils/setup.sh +source $(git rev-parse --show-toplevel)/ci3/source +source shared/setup.sh test_title "Profile private transfer with authwit" -echo -warn ////////////////////////////////////////////////////////////////////////////// -warn // Note: this test requires proving to be enabled to show meaningful output // -warn ////////////////////////////////////////////////////////////////////////////// -echo - -source $TEST_FOLDER/shared/deploy_main_account_and_token.sh -source $TEST_FOLDER/shared/mint_to_private.sh 100 main -source $TEST_FOLDER/shared/create_funded_account.sh operator +source $flows/shared/deploy_main_account_and_token.sh +source $flows/shared/mint_to_private.sh 100 main +source $flows/shared/create_funded_account.sh operator # Create an authwit for the operator to transfer tokens from the main account to operator's own account. aztec-wallet create-secret -a auth_nonce aztec-wallet create-authwit transfer_in_private operator -ca token --args accounts:main accounts:operator 100 secrets:auth_nonce -f main aztec-wallet add-authwit authwits:last main -f operator -# Simulate and profile `transfer_in_private` -GATE_COUNT=$(aztec-wallet simulate --profile transfer_in_private -ca token --args accounts:main accounts:operator 100 secrets:auth_nonce -f operator | grep "Total gates:" | awk '{print $3}') +# Write out debug execution steps (used for debugging prover development). 
+tmp=$(mktemp -d) +function cleanup { + rm -rf $tmp +} +trap cleanup EXIT SIGINT +aztec-wallet profile transfer_in_private --debug-execution-steps-dir $tmp -ca token --args accounts:main accounts:operator 100 secrets:auth_nonce -f operator +# Crude check, check that $tmp is over one megabyte, the validity of these files is checked more directly in the client ivc benches. +size=$(du -sb $tmp | awk '{print $1}') +if [ "$size" -lt 1000000 ]; then + echo "Debug execution steps directory is less than 1MB, something went wrong." + exit 1 +fi + +# Profile gate counts for `transfer_in_private` +gate_count=$(aztec-wallet profile transfer_in_private -ca token --args accounts:main accounts:operator 100 secrets:auth_nonce -f operator | grep "Total gates:" | awk '{print $3}') -echo "GATE_COUNT: $GATE_COUNT" +echo "GATE_COUNT: $gate_count" # Verify gate count is present in the output -if [ -z "$GATE_COUNT" ]; then - GATE_COUNT_SET=0 +if [ -z "$gate_count" ]; then + gate_count_set=0 else - GATE_COUNT_SET=1 + gate_count_set=1 fi -assert_eq $GATE_COUNT_SET 1 +assert_eq $gate_count_set 1 diff --git a/yarn-project/cli-wallet/test/flows/public_authwit_transfer.sh b/yarn-project/cli-wallet/test/flows/public_authwit_transfer.sh index 8712dce51307..b04f28f28e71 100755 --- a/yarn-project/cli-wallet/test/flows/public_authwit_transfer.sh +++ b/yarn-project/cli-wallet/test/flows/public_authwit_transfer.sh @@ -1,15 +1,15 @@ #!/bin/bash -set -e -source ../utils/setup.sh +source $(git rev-parse --show-toplevel)/ci3/source +source shared/setup.sh test_title "Public transfer on behalf of other" MINT_AMOUNT=42 TRANSFER_AMOUNT=21 -source $TEST_FOLDER/shared/deploy_main_account_and_token.sh -source $TEST_FOLDER/shared/mint_to_public.sh $MINT_AMOUNT main -source $TEST_FOLDER/shared/create_funded_account.sh operator +source $flows/shared/deploy_main_account_and_token.sh +source $flows/shared/mint_to_public.sh $MINT_AMOUNT main +source $flows/shared/create_funded_account.sh operator aztec-wallet 
create-secret -a auth_nonce aztec-wallet authorize-action transfer_in_public operator -ca token --args accounts:main accounts:operator $TRANSFER_AMOUNT secrets:auth_nonce -f main diff --git a/yarn-project/cli-wallet/test/flows/shared/create_funded_account.sh b/yarn-project/cli-wallet/test/flows/shared/create_funded_account.sh index 973042d30b6a..fbb8d752b2db 100644 --- a/yarn-project/cli-wallet/test/flows/shared/create_funded_account.sh +++ b/yarn-project/cli-wallet/test/flows/shared/create_funded_account.sh @@ -6,7 +6,7 @@ aztec-wallet create-account -a $ALIAS --register-only aztec-wallet bridge-fee-juice 100000000000000000 $ALIAS --mint --no-wait # The following produces two blocks, allowing the claim to be used in the next block. -source $TEST_FOLDER/shared/deploy_token.sh tmp-token-$ALIAS $ALIAS +source $flows/shared/deploy_token.sh tmp-token-$ALIAS $ALIAS # Deploying the account, paying the fee via bridging fee juice from L1 using the claim created above. aztec-wallet deploy-account -f $ALIAS --payment method=fee_juice,claim diff --git a/yarn-project/cli-wallet/test/flows/shared/deploy_main_account_and_token.sh b/yarn-project/cli-wallet/test/flows/shared/deploy_main_account_and_token.sh index b8164d438cd6..475ed87f507f 100644 --- a/yarn-project/cli-wallet/test/flows/shared/deploy_main_account_and_token.sh +++ b/yarn-project/cli-wallet/test/flows/shared/deploy_main_account_and_token.sh @@ -8,7 +8,7 @@ aztec-wallet bridge-fee-juice 100000000000000000 $ACCOUNT_ALIAS --mint --no-wait # Deploy token contract and set the main account as a minter. # The following produces two blocks, allowing the claim to be used in the next block. -source $TEST_FOLDER/shared/deploy_token.sh $TOKEN_ALIAS $ACCOUNT_ALIAS +source $flows/shared/deploy_token.sh $TOKEN_ALIAS $ACCOUNT_ALIAS # Deploying the account, paying the fee via bridging fee juice from L1 using the claim created above. 
aztec-wallet deploy-account -f $ACCOUNT_ALIAS --payment method=fee_juice,claim diff --git a/yarn-project/cli-wallet/test/flows/shared/deploy_sponsored_fpc_and_token.sh b/yarn-project/cli-wallet/test/flows/shared/deploy_sponsored_fpc_and_token.sh index 0c51b2e22dc7..2e38d4f29972 100644 --- a/yarn-project/cli-wallet/test/flows/shared/deploy_sponsored_fpc_and_token.sh +++ b/yarn-project/cli-wallet/test/flows/shared/deploy_sponsored_fpc_and_token.sh @@ -17,7 +17,7 @@ claimSecret=$(retrieve claimSecret) messageLeafIndex=$(retrieve messageLeafIndex) # The following produces two blocks, allowing the claim to be used in the next block. -source $TEST_FOLDER/shared/deploy_token.sh $TOKEN_ALIAS test1 +source $flows/shared/deploy_token.sh $TOKEN_ALIAS test1 # Claim the fee juice by calling the fee juice contract directly (address = 5). feeJuice=0x0000000000000000000000000000000000000000000000000000000000000005 diff --git a/yarn-project/cli-wallet/test/flows/shared/setup.sh b/yarn-project/cli-wallet/test/flows/shared/setup.sh new file mode 100644 index 000000000000..8a52cbefb544 --- /dev/null +++ b/yarn-project/cli-wallet/test/flows/shared/setup.sh @@ -0,0 +1,61 @@ +#!/bin/bash + +# Colors +r="\033[31m" # Red +g="\033[32m" # Green +y="\033[33m" # Yellow +b="\033[34m" # Blue +p="\033[35m" # Purple +rs="\033[0m" # Reset +bold="\033[1m" + +# Call our cli-wallet entrypoint as the default command. 
+command="${COMMAND:-"node --no-warnings $root/yarn-project/cli-wallet/dest/bin/index.js"}" +flows=$(pwd) +cd $root/noir-projects/noir-contracts + +function aztec-wallet { + echo_header aztec-wallet "$@" + $command "$@" +} + +function assert_eq { + if [ $1 = $2 ]; then + echo + echo -e "✅ ${bold}${g}Pass${rs}" + echo + echo "---------------------------------" + echo + else + echo + echo -e "❌ ${bold}${rs}Fail${rs}" + echo + exit 1 + fi +} + +function test_title { + echo -e "🧪 ${bold}${b}Test: $@${rs}" + echo +} + +function warn { + echo -e "${bold}${y}$@${rs}" +} + +function err { + echo -e "${bold}${r}$@${rs}" +} + +function bold { + echo -e "${bold}$@${rs}" +} + +function section { + echo + bold "➡️ $@" + echo +} + +warn "aztec-wallet is $command" +echo diff --git a/yarn-project/cli-wallet/test/flows/shield_and_transfer.sh b/yarn-project/cli-wallet/test/flows/shield_and_transfer.sh index 1b23d8190d1c..dcd04bf11c34 100755 --- a/yarn-project/cli-wallet/test/flows/shield_and_transfer.sh +++ b/yarn-project/cli-wallet/test/flows/shield_and_transfer.sh @@ -1,14 +1,14 @@ #!/bin/bash -set -e -source ../utils/setup.sh +source $(git rev-parse --show-toplevel)/ci3/source +source shared/setup.sh test_title "Shield and private transfer" MINT_AMOUNT=42 TRANSFER_AMOUNT=21 -source $TEST_FOLDER/shared/deploy_main_account_and_token.sh -source $TEST_FOLDER/shared/mint_to_private.sh $MINT_AMOUNT main +source $flows/shared/deploy_main_account_and_token.sh +source $flows/shared/mint_to_private.sh $MINT_AMOUNT main aztec-wallet create-account -a recipient --register-only diff --git a/yarn-project/cli-wallet/test/flows/sponsored_create_account_and_mint.sh b/yarn-project/cli-wallet/test/flows/sponsored_create_account_and_mint.sh index af9674ed4f58..d1b702bd0029 100755 --- a/yarn-project/cli-wallet/test/flows/sponsored_create_account_and_mint.sh +++ b/yarn-project/cli-wallet/test/flows/sponsored_create_account_and_mint.sh @@ -1,12 +1,12 @@ #!/bin/bash -set -e -source ../utils/setup.sh 
+source $(git rev-parse --show-toplevel)/ci3/source +source shared/setup.sh test_title "Create new account and mint privately. Fees paid by a sponsor." MINT_AMOUNT=42 -source $TEST_FOLDER/shared/deploy_sponsored_fpc_and_token.sh +source $flows/shared/deploy_sponsored_fpc_and_token.sh PAYMENT_METHOD="--payment method=fpc-sponsored,fpc=contracts:sponsoredFPC" diff --git a/yarn-project/cli-wallet/test/flows/tx_management.sh b/yarn-project/cli-wallet/test/flows/tx_management.sh index 275b492f570f..30a0174a86d6 100755 --- a/yarn-project/cli-wallet/test/flows/tx_management.sh +++ b/yarn-project/cli-wallet/test/flows/tx_management.sh @@ -1,6 +1,6 @@ #!/bin/bash -set -e -source ../utils/setup.sh +source $(git rev-parse --show-toplevel)/ci3/source +source shared/setup.sh test_title "Tx management" diff --git a/yarn-project/cli-wallet/test/test.sh b/yarn-project/cli-wallet/test/test.sh index 6f26c8ed8ec0..40bf5a6e184d 100755 --- a/yarn-project/cli-wallet/test/test.sh +++ b/yarn-project/cli-wallet/test/test.sh @@ -3,8 +3,6 @@ set -e LOCATION=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -NOIR_CONTRACTS_PATH=$(realpath ../../../noir-projects/noir-contracts) - POSITIONAL_ARGS=() while [[ $# -gt 0 ]]; do @@ -39,8 +37,6 @@ export WALLET_DATA_DIRECTORY="${LOCATION}/data" rm -rf $WALLET_DATA_DIRECTORY mkdir -p $WALLET_DATA_DIRECTORY -COMMAND="node --no-warnings $(realpath ../dest/bin/index.js)" - if [ "${REMOTE_PXE:-}" = "1" ]; then echo "Using remote PXE" export REMOTE_PXE="1" @@ -48,12 +44,13 @@ fi if [ "${USE_DOCKER:-}" = "1" ]; then echo "Using docker" - COMMAND="aztec-wallet" + # overwrite default command in flows + export COMMAND="aztec-wallet" fi cd ./flows for file in $(ls *.sh | grep ${FILTER:-"."}); do - ./$file $COMMAND $NOIR_CONTRACTS_PATH + ./$file done diff --git a/yarn-project/cli-wallet/test/utils/setup.sh b/yarn-project/cli-wallet/test/utils/setup.sh deleted file mode 100644 index d78212413a5e..000000000000 --- 
a/yarn-project/cli-wallet/test/utils/setup.sh +++ /dev/null @@ -1,68 +0,0 @@ -#!/bin/bash - -# Colors -r="\033[31m" # Red -g="\033[32m" # Green -y="\033[33m" # Yellow -b="\033[34m" # Blue -p="\033[35m" # Purple -rs="\033[0m" # Reset -bold="\033[1m" - -SETUP_LOCATION=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -export TEST_FOLDER=$( cd $SETUP_LOCATION/../flows &> /dev/null && pwd ) - -if [ $# -eq 4 ]; then - COMMAND="$1 $2 $3" - alias aztec-wallet="${COMMAND}" - cd $4 # noir-projects/noir-contracts folder -else - COMMAND=$1 - alias aztec-wallet="${COMMAND}" - cd $2 # noir-projects/noir-contracts folder -fi - -aztec-wallet () { - command $COMMAND $@ -} - -assert_eq () { - if [ $1 = $2 ]; then - echo - echo -e "✅ ${bold}${g}Pass${rs}" - echo - echo "---------------------------------" - echo - else - echo - echo -e "❌ ${bold}${rs}Fail${rs}" - echo - exit 1 - fi -} - -test_title () { - echo -e "🧪 ${bold}${b}Test: $@${rs}" - echo -} - -warn () { - echo -e "${bold}${y}$@${rs}" -} - -err () { - echo -e "${bold}${r}$@${rs}" -} - -bold() { - echo -e "${bold}$@${rs}" -} - -section() { - echo - bold "➡️ $@" - echo -} - -warn "aztec-wallet is $COMMAND" -echo \ No newline at end of file diff --git a/yarn-project/end-to-end/.gitignore b/yarn-project/end-to-end/.gitignore index 4f89329dc51b..ed381b4a6e9d 100644 --- a/yarn-project/end-to-end/.gitignore +++ b/yarn-project/end-to-end/.gitignore @@ -1,3 +1,5 @@ joblog.txt results -web/main.js* \ No newline at end of file +bench-out +private-flows-ivc-inputs-out +web/main.js* diff --git a/yarn-project/end-to-end/bootstrap.sh b/yarn-project/end-to-end/bootstrap.sh index 0ec8781efed3..62e12aebd977 100755 --- a/yarn-project/end-to-end/bootstrap.sh +++ b/yarn-project/end-to-end/bootstrap.sh @@ -113,7 +113,7 @@ function test_cmds { echo "$prefix simple e2e_token_contract/transfer_to_public" echo "$prefix simple e2e_token_contract/transfer.test" - # compose-based tests + # compose-based tests (use running sandbox) 
echo "$prefix compose composed/docs_examples" echo "$prefix compose composed/e2e_pxe" echo "$prefix compose composed/e2e_sandbox_example" @@ -125,6 +125,13 @@ function test_cmds { echo "$prefix compose guides/writing_an_account_contract" echo "$prefix compose e2e_token_bridge_tutorial_test" echo "$prefix compose uniswap_trade_on_l1_from_l2" + + # compose-based tests with custom scripts + for flow in ../cli-wallet/test/flows/*.sh; do + # Note these scripts are ran directly by docker-compose.yml because it ends in '.sh'. + # Set LOG_LEVEL=info for a better output experience. Deeper debugging should happen with other e2e tests. + echo "$hash LOG_LEVEL=info $run_test_script compose $flow" + done } function test { @@ -132,17 +139,42 @@ function test { test_cmds | filter_test_cmds | parallelise } +# Entrypoint for barretenberg benchmarks that rely on captured e2e inputs. +function generate_private_ivc_inputs { + export CAPTURE_IVC_FOLDER=private-flows-ivc-inputs-out + rm -rf "$CAPTURE_IVC_FOLDER" && mkdir -p "$CAPTURE_IVC_FOLDER" + if cache_download bb-client-ivc-captures-$hash.tar.gz; then + return + fi + if [ -n "${DOWNLOAD_ONLY:-}" ]; then + echo "Could not find ivc inputs cached!" + return + fi + # Running these again separately from tests is a bit of a hack, + # but we need to ensure test caching does not get in the way. 
+ echo " + scripts/run_test.sh simple e2e_amm + scripts/run_test.sh simple e2e_nft + scripts/run_test.sh simple e2e_blacklist_token_contract/transfer_private + " | parallel --line-buffer --halt now,fail=1 + cache_upload bb-client-ivc-captures-$hash.tar.gz $CAPTURE_IVC_FOLDER +} + function bench { + rm -rf bench-out mkdir -p bench-out - BENCH_OUTPUT=bench-out/yp-bench.json scripts/run_test.sh simple bench_build_block - cache_upload yarn-project-bench-results-$COMMIT_HASH.tar.gz ./bench-out/yp-bench.json + if cache_download yarn-project-bench-results-$hash.tar.gz; then + return + fi + BENCH_OUTPUT=$root/yarn-project/end-to-end/bench-out/yp-bench.json scripts/run_test.sh simple bench_build_block + cache_upload yarn-project-bench-results-$hash.tar.gz ./bench-out/yp-bench.json } case "$cmd" in "clean") git clean -fdx ;; - test|test_cmds|bench) + test|test_cmds|bench|generate_private_ivc_inputs) $cmd ;; *) diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index 6b36e7cd7ce2..0f92786ebc12 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -44,6 +44,7 @@ "@aztec/l1-artifacts": "workspace:^", "@aztec/merkle-tree": "workspace:^", "@aztec/noir-contracts.js": "workspace:^", + "@aztec/noir-noirc_abi": "workspace:^", "@aztec/noir-protocol-circuits-types": "workspace:^", "@aztec/p2p": "workspace:^", "@aztec/protocol-contracts": "workspace:^", @@ -58,6 +59,7 @@ "@aztec/world-state": "workspace:^", "@iarna/toml": "^2.2.5", "@jest/globals": "^29.5.0", + "@msgpack/msgpack": "^3.0.0-beta2", "@noble/curves": "^1.0.0", "@swc/core": "^1.4.11", "@swc/jest": "^0.2.36", diff --git a/yarn-project/end-to-end/scripts/docker-compose-wallet.yml b/yarn-project/end-to-end/scripts/docker-compose-wallet.yml deleted file mode 100644 index 19e49feb5b79..000000000000 --- a/yarn-project/end-to-end/scripts/docker-compose-wallet.yml +++ /dev/null @@ -1,45 +0,0 @@ -version: '3' -services: - fork: - image: 
aztecprotocol/foundry:25f24e677a6a32a62512ad4f561995589ac2c7dc-${ARCH_TAG:-amd64} - pull_policy: always - entrypoint: 'anvil --silent -p 8545 --host 0.0.0.0 --chain-id 31337' - ports: - - 8545:8545 - - sandbox: - image: aztecprotocol/aztec:${AZTEC_DOCKER_TAG:-latest} - command: 'start --sandbox' - environment: - LOG_LEVEL: ${LOG_LEVEL:-verbose} - ETHEREUM_HOSTS: http://fork:8545 - L1_CHAIN_ID: 31337 - FORCE_COLOR: ${FORCE_COLOR:-1} - ARCHIVER_POLLING_INTERVAL_MS: 50 - P2P_BLOCK_CHECK_INTERVAL_MS: 50 - SEQ_TX_POLLING_INTERVAL_MS: 50 - WS_BLOCK_CHECK_INTERVAL_MS: 50 - ARCHIVER_VIEM_POLLING_INTERVAL_MS: 500 - HARDWARE_CONCURRENCY: ${HARDWARE_CONCURRENCY:-} - expose: - - '8080' - - end-to-end: - image: aztecprotocol/end-to-end:${AZTEC_DOCKER_TAG:-latest} - environment: - LOG_LEVEL: ${LOG_LEVEL:-verbose} - ETHEREUM_HOSTS: http://fork:8545 - L1_CHAIN_ID: 31337 - FORCE_COLOR: ${FORCE_COLOR:-1} - PXE_URL: http://sandbox:8080 - working_dir: /usr/src/yarn-project/cli-wallet/test - entrypoint: > - sh -c ' - while ! nc -z sandbox 8080; do sleep 1; done; - ./test.sh - ' - volumes: - - ../log:/usr/src/yarn-project/end-to-end/log:rw - depends_on: - - sandbox - - fork diff --git a/yarn-project/end-to-end/scripts/docker-compose.yml b/yarn-project/end-to-end/scripts/docker-compose.yml index 66981af22877..1392c099d178 100644 --- a/yarn-project/end-to-end/scripts/docker-compose.yml +++ b/yarn-project/end-to-end/scripts/docker-compose.yml @@ -46,6 +46,10 @@ services: FORCE_COLOR: ${FORCE_COLOR:-1} PXE_URL: http://sandbox:8080 AZTEC_NODE_URL: http://sandbox:8080 + # Allow git usage despite different ownership. Relevant for script tests. + GIT_CONFIG_GLOBAL: /root/aztec-packages/build-images/src/home/.gitconfig + # For debugging scripts that were passed as TEST. + BUILD_SYSTEM_DEBUG: ${BUILD_SYSTEM_DEBUG:-} # Voodoo explanation. # Why do we do all this? To ensure tests can have their own traps for cleanup. Namely up_quick_start... # Propagating the signal (e.g. 
via exec) isn't much use, as jest provides no mechanism for cleanup on signals. diff --git a/yarn-project/end-to-end/scripts/e2e_compose_test.sh b/yarn-project/end-to-end/scripts/e2e_compose_test.sh deleted file mode 100755 index 86fec03abc5e..000000000000 --- a/yarn-project/end-to-end/scripts/e2e_compose_test.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/bash - -# Usage: ./e2e_compose_test.sh -# Optional environment variables: -# COMPOSE_FILE (default: ./scripts/docker-compose-images.yml) -# LOG_LEVEL (default: "verbose") -# HARDWARE_CONCURRENCY (default: "") -# AZTEC_DOCKER_TAG (default: current git commit) - -set -eu - -# Note: We export variables to make them available to the docker compose file - -# Main positional parameter -export TEST="$1" -# Variables with defaults -COMPOSE_FILE="${COMPOSE_FILE:-./scripts/docker-compose-images.yml}" -export LOG_LEVEL="${LOG_LEVEL:-verbose}" -export HARDWARE_CONCURRENCY="${HARDWARE_CONCURRENCY:-}" -export AZTEC_DOCKER_TAG="${AZTEC_DOCKER_TAG:-$(git rev-parse HEAD)}" - -# Set the ARCH_TAG variable for docker compose -ARCH=$(uname -m) -if [ "$ARCH" = "arm64" ] || [ "$ARCH" = "aarch64" ]; then - export ARCH_TAG="arm64" -else - export ARCH_TAG="amd64" -fi - -FORCE_BUILD="${FORCE_BUILD:-true}" - -# Compute project_name -export JOB_NAME=$(echo "$TEST" | sed 's/\./_/g' | sed 's/\//_/g') - -# Determine CMD -if docker compose >/dev/null 2>&1; then - CMD="docker compose" -else - CMD="docker-compose" -fi - -# Optimize image building -if ! docker image ls --format '{{.Repository}}:{{.Tag}}' | grep -q "aztecprotocol/aztec:$AZTEC_DOCKER_TAG" || - ! docker image ls --format '{{.Repository}}:{{.Tag}}' | grep -q "aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG"; then - echo "Docker images not found." 
- exit 1 -fi - -# Run docker compose -$CMD -p "$JOB_NAME" -f "$COMPOSE_FILE" up --exit-code-from=end-to-end --force-recreate diff --git a/yarn-project/end-to-end/scripts/e2e_test.sh b/yarn-project/end-to-end/scripts/e2e_test.sh deleted file mode 100755 index c1150ef0f95b..000000000000 --- a/yarn-project/end-to-end/scripts/e2e_test.sh +++ /dev/null @@ -1,82 +0,0 @@ -#!/bin/bash - -# Usage: ./e2e_test.sh <...extra_args> -# Optional environment variables: -# HARDWARE_CONCURRENCY (default: "") -# FAKE_PROOFS (default: "") -# COMPOSE_FILE (default: "./scripts/docker-compose-images.yml") - -set -eu - -e2e_root=$(git rev-parse --show-toplevel)/yarn-project/end-to-end -# go above this folder -cd "$e2e_root" -# Main positional parameter -export TEST="$1" -shift - -# Default values for environment variables -export HARDWARE_CONCURRENCY="${HARDWARE_CONCURRENCY:-}" -export FAKE_PROOFS="${FAKE_PROOFS:-}" -export COMPOSE_FILE="${COMPOSE_FILE:-./scripts/docker-compose-images.yml}" -export AZTEC_DOCKER_TAG=${AZTEC_DOCKER_TAG:-$(git rev-parse HEAD)} - -# Function to load test configuration -load_test_config() { - local test_name="$1" - yq e ".tests.${test_name}" "scripts/e2e_test_config.yml" -} - -# Check if Docker images exist -if ! docker image ls --format '{{.Repository}}:{{.Tag}}' | grep -q "aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG"; then - echo "Docker images not found." 
- exit 1 -fi - -# Load test configuration -test_config=$(load_test_config "$TEST") - -# Determine the test path -test_path=$(echo "$test_config" | yq e '.test_path // ""' -) -if [ -z "$test_path" ]; then - test_path="${TEST}" -fi - -# Check for ignore_failures -ignore_failures=$(echo "$test_config" | yq e '.ignore_failures // false' -) -if [ "$ignore_failures" = "true" ]; then - echo "Ignoring failures for test $TEST" -fi - -# Init output folder -mkdir -p ./out - -# Check if the test uses docker compose -if [ "$(echo "$test_config" | yq e '.use_compose // false' -)" = "true" ]; then - "$e2e_root/scripts/e2e_compose_test.sh" "$test_path" "$@" || [ "$ignore_failures" = "true" ] -elif [ "$(echo "$test_config" | yq e '.with_alerts // false' -)" = "true" ]; then - "$e2e_root/scripts/e2e_test_with_alerts.sh" "$test_path" "$@" || [ "$ignore_failures" = "true" ] -else - # Set environment variables - while IFS='=' read -r key value; do - export "$key=$value" - done < <(echo "$test_config" | yq e '.env // {} | to_entries | .[] | .key + "=" + .value' -) - - # Check for custom command - custom_command=$(echo "$test_config" | yq e '.command // ""' -) - env_args=$(echo "$test_config" | yq e '.env // {} | to_entries | .[] | "-e " + .key + "=" + .value' - | tr '\n' ' ') - if [ -n "$custom_command" ]; then - /bin/bash -c "$custom_command" || [ "$ignore_failures" = "true" ] - else - set -x - # Run the default docker command - docker run \ - -e HARDWARE_CONCURRENCY="$HARDWARE_CONCURRENCY" \ - -e FAKE_PROOFS="$FAKE_PROOFS" \ - -e FORCE_COLOR=1 \ - $env_args \ - --volume "$(pwd)"/out:/out \ - --rm aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG \ - "$test_path" "$@" || [ "$ignore_failures" = "true" ] - fi -fi diff --git a/yarn-project/end-to-end/scripts/e2e_test_config.yml b/yarn-project/end-to-end/scripts/e2e_test_config.yml deleted file mode 100644 index 11afa8fe7d6d..000000000000 --- a/yarn-project/end-to-end/scripts/e2e_test_config.yml +++ /dev/null @@ -1,112 +0,0 @@ -tests: - base: {} 
- bench_build_block: - env: - BENCH_OUTPUT: '/out/bench.json' - e2e_2_pxes: {} - e2e_account_contracts: {} - e2e_amm: {} - e2e_authwit: {} - e2e_avm_simulator: {} - e2e_blacklist_token_contract: {} - e2e_block_building: {} - e2e_bot: {} - e2e_card_game: {} - e2e_cheat_codes: {} - e2e_contract_updates: {} - e2e_cross_chain_messaging: {} - e2e_crowdfunding_and_claim: {} - e2e_deploy_contract: {} - e2e_devnet_smoke: {} - docs_examples: - use_compose: true - e2e_epochs: {} - e2e_escrow_contract: {} - e2e_fees_account_init: - test_path: 'e2e_fees/account_init.test.ts' - # TODO(https://github.com/AztecProtocol/aztec-packages/issues/9488): reenable - # e2e_fees_dapp_subscription: - # test_path: 'e2e_fees/dapp_subscription.test.ts' - e2e_fees_failures: - test_path: 'e2e_fees/failures.test.ts' - e2e_fees_fee_juice_payments: - test_path: 'e2e_fees/fee_juice_payments.test.ts' - e2e_fees_gas_estimation: - test_path: 'e2e_fees/gas_estimation.test.ts' - e2e_fees_private_payments: - test_path: 'e2e_fees/private_payments.test.ts' - e2e_fees_public_payments: - test_path: 'e2e_fees/public_payments.test.ts' - e2e_keys: {} - e2e_l1_with_wall_time: {} - e2e_lending_contract: {} - e2e_event_logs: {} - e2e_max_block_number: {} - e2e_multiple_accounts_1_enc_key: {} - e2e_nested_contract: {} - e2e_nft: {} - e2e_note_getter: {} - e2e_offchain_note_delivery: {} - e2e_ordering: {} - e2e_outbox: {} - # TODO reenable in https://github.com/AztecProtocol/aztec-packages/pull/9727 - # e2e_pending_note_hashes_contract: {} - e2e_private_voting_contract: {} - e2e_prover_fake_proofs: - test_path: 'e2e_prover/full.test.ts' - env: - FAKE_PROOFS: '1' - # Ran in CI as standalone - e2e_prover_full: - test_path: 'e2e_prover/full.test.ts' - env: - HARDWARE_CONCURRENCY: '32' - e2e_pruned_blocks: {} - e2e_public_testnet: {} - e2e_pxe: - use_compose: true - e2e_sandbox_example: - use_compose: true - e2e_state_vars: {} - e2e_static_calls: {} - e2e_synching: {} - e2e_token_bridge_tutorial_test: - use_compose: true 
- e2e_token_contract: - with_alerts: true - e2e_p2p_gossip: - test_path: 'e2e_p2p/gossip_network.test.ts' - with_alerts: true - e2e_p2p_slashing: - test_path: 'e2e_p2p/slashing.test.ts' - e2e_p2p_upgrade_governance_proposer: - test_path: 'e2e_p2p/upgrade_governance_proposer.test.ts' - e2e_p2p_rediscovery: - test_path: 'e2e_p2p/rediscovery.test.ts' - e2e_p2p_reqresp: - test_path: 'e2e_p2p/reqresp.test.ts' - e2e_p2p_reex: - test_path: 'e2e_p2p/reex.test.ts' - # flakey_e2e_tests: - # test_path: './src/flakey' - # ignore_failures: true - guides_dapp_testing: - use_compose: true - test_path: 'guides/dapp_testing.test.ts' - guides_sample_dapp: - use_compose: true - test_path: 'sample-dapp/index.test.mjs' - guides_sample_dapp_ci: - use_compose: true - test_path: 'sample-dapp/ci/index.test.mjs' - guides_up_quick_start: - use_compose: true - test_path: 'guides/up_quick_start.test.ts' - guides_writing_an_account_contract: - use_compose: true - test_path: 'guides/writing_an_account_contract.test.ts' - integration_l1_publisher: - use_compose: true - # https://github.com/AztecProtocol/aztec-packages/issues/10030 - # uniswap_trade_on_l1_from_l2: - # use_compose: true diff --git a/yarn-project/end-to-end/scripts/e2e_test_public_testnet.sh b/yarn-project/end-to-end/scripts/e2e_test_public_testnet.sh index d06b065cd3ad..97f5085de81a 100755 --- a/yarn-project/end-to-end/scripts/e2e_test_public_testnet.sh +++ b/yarn-project/end-to-end/scripts/e2e_test_public_testnet.sh @@ -1,4 +1,5 @@ #!/bin/bash +# TODO needs porting to ci3 # Usage: ./e2e_test_public_testnet.sh # Optional environment variables: diff --git a/yarn-project/end-to-end/scripts/e2e_test_with_alerts.sh b/yarn-project/end-to-end/scripts/e2e_test_with_alerts.sh index 5611efa7ab21..7cf65c91000e 100755 --- a/yarn-project/end-to-end/scripts/e2e_test_with_alerts.sh +++ b/yarn-project/end-to-end/scripts/e2e_test_with_alerts.sh @@ -1,4 +1,5 @@ #! 
/bin/bash +# TODO needs porting to ci3 ## Run an end to end test with alerts # This will run an end to end test running the otel-lgtm stack (otel-collector, grafana, prometheus, tempo and loki) diff --git a/yarn-project/end-to-end/scripts/run_test.sh b/yarn-project/end-to-end/scripts/run_test.sh index d0d431aa8f01..4cb2205ffd6c 100755 --- a/yarn-project/end-to-end/scripts/run_test.sh +++ b/yarn-project/end-to-end/scripts/run_test.sh @@ -20,25 +20,30 @@ case "$type" in # Strip leading non alpha numerics and replace / with _ for the container name. name="$(echo "${TEST}" | sed 's/^[^a-zA-Z0-9]*//' | tr '/' '_')${NAME_POSTFIX:-}" name_arg="--name $name" + repo_dir=$(git rev-parse --show-toplevel) trap 'docker rm -f $name &>/dev/null' SIGINT SIGTERM EXIT docker rm -f $name &>/dev/null || true docker run --rm \ $name_arg \ --cpus=${CPUS:-4} \ --memory=${MEM:-8g} \ - -v$(git rev-parse --show-toplevel):/root/aztec-packages \ - -v$HOME/.bb-crs:/root/.bb-crs \ + --user $(id -u):$(id -g) \ + "-v$repo_dir:$repo_dir" \ + "-v$HOME/.bb-crs:$HOME/.bb-crs" \ --mount type=tmpfs,target=/tmp,tmpfs-size=1g \ --mount type=tmpfs,target=/tmp-jest,tmpfs-size=512m \ + -e HOME \ -e JEST_CACHE_DIR=/tmp-jest \ -e FAKE_PROOFS \ -e BENCH_OUTPUT \ + -e CAPTURE_IVC_FOLDER \ -e LOG_LEVEL \ - --workdir /root/aztec-packages/yarn-project/end-to-end \ + --workdir "$repo_dir/yarn-project/end-to-end" \ aztecprotocol/build:3.0 ./scripts/test_simple.sh $TEST ;; "compose") - name="${TEST//[\/\.]/_}${NAME_POSTFIX:-}" + # Strip leading non alpha numerics and replace / and . with _. 
+ name=$(echo "${TEST}${NAME_POSTFIX:-}" | sed 's/^[^a-zA-Z0-9]*//; s/[\/\.]/_/g') name_arg="-p $name" trap 'docker compose $name_arg down --timeout 0' SIGTERM SIGINT EXIT docker compose $name_arg down --timeout 0 &> /dev/null diff --git a/yarn-project/end-to-end/src/e2e_amm.test.ts b/yarn-project/end-to-end/src/e2e_amm.test.ts index 71c4640e91b2..a08a90b625e5 100644 --- a/yarn-project/end-to-end/src/e2e_amm.test.ts +++ b/yarn-project/end-to-end/src/e2e_amm.test.ts @@ -6,6 +6,7 @@ import { jest } from '@jest/globals'; import { deployToken, mintTokensToPrivate } from './fixtures/token_utils.js'; import { setup } from './fixtures/utils.js'; +import { capturePrivateExecutionStepsIfEnvSet } from './shared/capture_private_execution_steps.js'; const TIMEOUT = 120_000; @@ -124,11 +125,11 @@ describe('AMM', () => { ), }); - await amm + const addLiquidityInteraction = amm .withWallet(liquidityProvider) - .methods.add_liquidity(amount0Max, amount1Max, amount0Min, amount1Min, nonceForAuthwits) - .send() - .wait(); + .methods.add_liquidity(amount0Max, amount1Max, amount0Min, amount1Min, nonceForAuthwits); + await capturePrivateExecutionStepsIfEnvSet('amm-add-liquidity', addLiquidityInteraction); + await addLiquidityInteraction.send().wait(); const ammBalancesAfter = await getAmmBalances(); const lpBalancesAfter = await getWalletBalances(liquidityProvider); @@ -233,11 +234,12 @@ describe('AMM', () => { const amountOutMin = await amm.methods .get_amount_out_for_exact_in(ammBalancesBefore.token0, ammBalancesBefore.token1, amountIn) .simulate(); - await amm + + const swapExactTokensInteraction = amm .withWallet(swapper) - .methods.swap_exact_tokens_for_tokens(token0.address, token1.address, amountIn, amountOutMin, nonceForAuthwits) - .send() - .wait(); + .methods.swap_exact_tokens_for_tokens(token0.address, token1.address, amountIn, amountOutMin, nonceForAuthwits); + await capturePrivateExecutionStepsIfEnvSet('amm-swap-exact-tokens', swapExactTokensInteraction); + await 
swapExactTokensInteraction.send().wait(); // We know exactly how many tokens we're supposed to get because we know nobody else interacted with the AMM // before we did. @@ -309,6 +311,7 @@ describe('AMM', () => { // real-life scenario we'd need to choose sensible amounts to avoid losing value due to slippage. const amount0Min = 1n; const amount1Min = 1n; + await amm .withWallet(otherLiquidityProvider) .methods.remove_liquidity(liquidityTokenBalance, amount0Min, amount1Min, nonceForAuthwits) diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_private.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_private.test.ts index ba524f3a41af..ed9ddfa6cd2a 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_private.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_private.test.ts @@ -1,6 +1,7 @@ import { Fr, computeAuthWitMessageHash } from '@aztec/aztec.js'; import { DUPLICATE_NULLIFIER_ERROR } from '../fixtures/fixtures.js'; +import { capturePrivateExecutionStepsIfEnvSet } from '../shared/capture_private_execution_steps.js'; import { BlacklistTokenContractTest } from './blacklist_token_contract_test.js'; describe('e2e_blacklist_token_contract transfer private', () => { @@ -28,7 +29,11 @@ describe('e2e_blacklist_token_contract transfer private', () => { const balance0 = await asset.methods.balance_of_private(wallets[0].getAddress()).simulate(); const amount = balance0 / 2n; expect(amount).toBeGreaterThan(0n); - await asset.methods.transfer(wallets[0].getAddress(), wallets[1].getAddress(), amount, 0).send().wait(); + const tokenTransferInteraction = asset + .withWallet(wallets[0]) + .methods.transfer(wallets[0].getAddress(), wallets[1].getAddress(), amount, 0); + await capturePrivateExecutionStepsIfEnvSet('token-transfer', tokenTransferInteraction); + await tokenTransferInteraction.send().wait(); tokenSim.transferPrivate(wallets[0].getAddress(), 
wallets[1].getAddress(), amount); // We give wallets[0] access to wallets[1]'s notes to be able to check balances after the test. diff --git a/yarn-project/end-to-end/src/e2e_nft.test.ts b/yarn-project/end-to-end/src/e2e_nft.test.ts index fcbd17115df8..973323ce3e57 100644 --- a/yarn-project/end-to-end/src/e2e_nft.test.ts +++ b/yarn-project/end-to-end/src/e2e_nft.test.ts @@ -4,6 +4,7 @@ import { NFTContract } from '@aztec/noir-contracts.js/NFT'; import { jest } from '@jest/globals'; import { setup } from './fixtures/utils.js'; +import { capturePrivateExecutionStepsIfEnvSet } from './shared/capture_private_execution_steps.js'; const TIMEOUT = 120_000; @@ -49,7 +50,9 @@ describe('NFT', () => { it('minter mints to a user', async () => { const nftContractAsMinter = await NFTContract.at(nftContractAddress, minterWallet); - await nftContractAsMinter.methods.mint(user1Wallet.getAddress(), TOKEN_ID).send().wait(); + const nftMintInteraction = nftContractAsMinter.methods.mint(user1Wallet.getAddress(), TOKEN_ID); + await capturePrivateExecutionStepsIfEnvSet('nft-mint', nftMintInteraction); + await nftMintInteraction.send().wait(); const ownerAfterMint = await nftContractAsMinter.methods.owner_of(TOKEN_ID).simulate(); expect(ownerAfterMint).toEqual(user1Wallet.getAddress()); @@ -71,10 +74,14 @@ describe('NFT', () => { it('transfers in private', async () => { const nftContractAsUser2 = await NFTContract.at(nftContractAddress, user2Wallet); - await nftContractAsUser2.methods - .transfer_in_private(user2Wallet.getAddress(), user1Wallet.getAddress(), TOKEN_ID, 0) - .send() - .wait(); + const nftTransferInteraction = nftContractAsUser2.methods.transfer_in_private( + user2Wallet.getAddress(), + user1Wallet.getAddress(), + TOKEN_ID, + 0, + ); + await capturePrivateExecutionStepsIfEnvSet('nft-transfer-in-private', nftTransferInteraction); + await nftTransferInteraction.send().wait(); const user1Nfts = await getPrivateNfts(user1Wallet.getAddress()); 
expect(user1Nfts).toEqual([TOKEN_ID]); diff --git a/yarn-project/end-to-end/src/shared/capture_private_execution_steps.ts b/yarn-project/end-to-end/src/shared/capture_private_execution_steps.ts new file mode 100644 index 000000000000..452b6ae2e218 --- /dev/null +++ b/yarn-project/end-to-end/src/shared/capture_private_execution_steps.ts @@ -0,0 +1,43 @@ +/** + * This module exposes the ability to capture the private exection steps that go into our "Client IVC" prover. + * These are used for debugging and benchmarking barretenberg (the prover component). + */ +import type { ContractFunctionInteraction } from '@aztec/aztec.js/contracts'; +import { createLogger } from '@aztec/foundation/log'; +import { serializeWitness } from '@aztec/noir-noirc_abi'; +import type { PrivateExecutionStep } from '@aztec/stdlib/kernel'; + +import { encode } from '@msgpack/msgpack'; +import { promises as fs } from 'fs'; +import path from 'path'; + +const logger = createLogger('e2e:capture-private-execution-steps'); + +// TODO(#7371): This is duplicated. +// Longer term we won't use this hacked together msgpack format +// Leaving duplicated as this eventually bb will provide a serialization +// helper for passing to a generic msgpack RPC endpoint. +async function _createClientIvcProofFiles(directory: string, executionSteps: PrivateExecutionStep[]) { + const acirPath = path.join(directory, 'acir.msgpack'); + const witnessPath = path.join(directory, 'witnesses.msgpack'); + await fs.writeFile(acirPath, encode(executionSteps.map(map => map.bytecode))); + await fs.writeFile(witnessPath, encode(executionSteps.map(map => serializeWitness(map.witness)))); + return { + acirPath, + witnessPath, + }; +} + +export async function capturePrivateExecutionStepsIfEnvSet(label: string, interaction: ContractFunctionInteraction) { + // Not included in env_var.ts as internal to e2e tests. 
+ const ivcFolder = process.env.CAPTURE_IVC_FOLDER; + if (ivcFolder) { + logger.info(`Capturing client ivc execution steps for ${label}`); + const result = await interaction.profile({ profileMode: 'execution-steps' }); + const resultsDirectory = path.join(ivcFolder, label); + logger.info(`Writing private execution steps to ${resultsDirectory}`); + await fs.mkdir(resultsDirectory, { recursive: true }); + await _createClientIvcProofFiles(resultsDirectory, result.executionSteps); + logger.info(`Wrote private execution steps to ${resultsDirectory}`); + } +} diff --git a/yarn-project/entrypoints/src/dapp_entrypoint.ts b/yarn-project/entrypoints/src/dapp_entrypoint.ts index 4141b417a8ed..ac7ecf963c7c 100644 --- a/yarn-project/entrypoints/src/dapp_entrypoint.ts +++ b/yarn-project/entrypoints/src/dapp_entrypoint.ts @@ -1,7 +1,7 @@ import { Fr, computeAuthWitMessageHash, computeInnerAuthWitHash } from '@aztec/aztec.js'; import type { AuthWitnessProvider } from '@aztec/aztec.js/account'; import { type EntrypointInterface, EntrypointPayload, type ExecutionRequestInit } from '@aztec/aztec.js/entrypoint'; -import { type FunctionAbi, FunctionSelector, encodeArguments } from '@aztec/stdlib/abi'; +import { type FunctionAbi, FunctionSelector, FunctionType, encodeArguments } from '@aztec/stdlib/abi'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import { HashedValues, TxContext, TxExecutionRequest } from '@aztec/stdlib/tx'; @@ -57,11 +57,11 @@ export class DefaultDappEntrypoint implements EntrypointInterface { return txRequest; } - private getEntrypointAbi() { + private getEntrypointAbi(): FunctionAbi { return { name: 'entrypoint', isInitializer: false, - functionType: 'private', + functionType: FunctionType.PRIVATE, isInternal: false, isStatic: false, parameters: [ @@ -120,6 +120,6 @@ export class DefaultDappEntrypoint implements EntrypointInterface { ], returnTypes: [], errorTypes: {}, - } as FunctionAbi; + } as const; } } diff --git 
a/yarn-project/foundation/.eslintrc.cjs b/yarn-project/foundation/.eslintrc.cjs index 6ea775e348e1..a74aaccf8646 100644 --- a/yarn-project/foundation/.eslintrc.cjs +++ b/yarn-project/foundation/.eslintrc.cjs @@ -48,6 +48,7 @@ module.exports = { 'no-constant-condition': 'off', curly: ['error', 'all'], camelcase: 2, + 'import/no-relative-packages': 'error', 'no-restricted-imports': [ 'error', { diff --git a/yarn-project/kv-store/src/lmdb-v2/array.ts b/yarn-project/kv-store/src/lmdb-v2/array.ts index 039ee1cbea18..b1362f34f7fb 100644 --- a/yarn-project/kv-store/src/lmdb-v2/array.ts +++ b/yarn-project/kv-store/src/lmdb-v2/array.ts @@ -3,6 +3,7 @@ import { Encoder } from 'msgpackr/pack'; import type { AztecAsyncArray } from '../interfaces/array.js'; import type { AztecAsyncSingleton } from '../interfaces/singleton.js'; import type { ReadTransaction } from './read_transaction.js'; +// eslint-disable-next-line import/no-cycle import { AztecLMDBStoreV2, execInReadTx, execInWriteTx } from './store.js'; import { deserializeKey, serializeKey } from './utils.js'; diff --git a/yarn-project/kv-store/src/lmdb-v2/map.ts b/yarn-project/kv-store/src/lmdb-v2/map.ts index 15a12b982a8f..7a451c68f787 100644 --- a/yarn-project/kv-store/src/lmdb-v2/map.ts +++ b/yarn-project/kv-store/src/lmdb-v2/map.ts @@ -3,6 +3,7 @@ import { Encoder } from 'msgpackr'; import type { Key, Range } from '../interfaces/common.js'; import type { AztecAsyncMap, AztecAsyncMultiMap } from '../interfaces/map.js'; import type { ReadTransaction } from './read_transaction.js'; +// eslint-disable-next-line import/no-cycle import { type AztecLMDBStoreV2, execInReadTx, execInWriteTx } from './store.js'; import { deserializeKey, maxKey, minKey, serializeKey } from './utils.js'; diff --git a/yarn-project/kv-store/src/lmdb-v2/singleton.ts b/yarn-project/kv-store/src/lmdb-v2/singleton.ts index a6a892543c12..491ba0be7263 100644 --- a/yarn-project/kv-store/src/lmdb-v2/singleton.ts +++ 
b/yarn-project/kv-store/src/lmdb-v2/singleton.ts @@ -1,6 +1,7 @@ import { Encoder } from 'msgpackr'; import type { AztecAsyncSingleton } from '../interfaces/singleton.js'; +// eslint-disable-next-line import/no-cycle import { type AztecLMDBStoreV2, execInReadTx, execInWriteTx } from './store.js'; import { serializeKey } from './utils.js'; diff --git a/yarn-project/kv-store/src/lmdb-v2/store.ts b/yarn-project/kv-store/src/lmdb-v2/store.ts index bd55a6e0468a..c4aa30cf2104 100644 --- a/yarn-project/kv-store/src/lmdb-v2/store.ts +++ b/yarn-project/kv-store/src/lmdb-v2/store.ts @@ -12,7 +12,9 @@ import type { AztecAsyncMap, AztecAsyncMultiMap } from '../interfaces/map.js'; import type { AztecAsyncSet } from '../interfaces/set.js'; import type { AztecAsyncSingleton } from '../interfaces/singleton.js'; import type { AztecAsyncKVStore } from '../interfaces/store.js'; +// eslint-disable-next-line import/no-cycle import { LMDBArray } from './array.js'; +// eslint-disable-next-line import/no-cycle import { LMDBMap, LMDBMultiMap } from './map.js'; import { Database, @@ -22,6 +24,7 @@ import { type LMDBResponseBody, } from './message.js'; import { ReadTransaction } from './read_transaction.js'; +// eslint-disable-next-line import/no-cycle import { LMDBSingleValue } from './singleton.js'; import { WriteTransaction } from './write_transaction.js'; diff --git a/yarn-project/prover-client/src/mocks/test_context.ts b/yarn-project/prover-client/src/mocks/test_context.ts index 1c853577611a..4b940f41ae89 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -26,6 +26,8 @@ import { NativeWorldStateService } from '@aztec/world-state/native'; import { promises as fs } from 'fs'; +// TODO(#12613) This means of sharing test code is not ideal. 
+// eslint-disable-next-line import/no-relative-packages import { TestCircuitProver } from '../../../bb-prover/src/test/test_circuit_prover.js'; import { buildBlock } from '../block_builder/light.js'; import { ProvingOrchestrator } from '../orchestrator/index.js'; diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts index 0db23aa7b59d..3968f9c3a842 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts @@ -6,6 +6,8 @@ import type { ServerCircuitProver } from '@aztec/stdlib/interfaces/server'; import { jest } from '@jest/globals'; +// TODO(#12613) This means of sharing test code is not ideal. +// eslint-disable-next-line import/no-relative-packages import { TestCircuitProver } from '../../../bb-prover/src/test/test_circuit_prover.js'; import { TestContext } from '../mocks/test_context.js'; import { ProvingOrchestrator } from './orchestrator.js'; diff --git a/yarn-project/pxe/scripts/generate_package_info.js b/yarn-project/pxe/scripts/generate_package_info.js index 57c9801b02dc..12f258ce7c22 100644 --- a/yarn-project/pxe/scripts/generate_package_info.js +++ b/yarn-project/pxe/scripts/generate_package_info.js @@ -2,9 +2,12 @@ import { readFileSync, writeFileSync } from 'fs'; import { dirname, join } from 'path'; import { fileURLToPath } from 'url'; + const currentDir = dirname(fileURLToPath(import.meta.url)); -const { version, name } = JSON.parse(readFileSync(join(currentDir, '../package.json'))); +const { name } = JSON.parse(readFileSync(join(currentDir, '../package.json'))); +const versionManifest = JSON.parse(readFileSync(join(currentDir, '../../../.release-please-manifest.json'))); +const version = versionManifest['.']; writeFileSync( join(currentDir, '../src/config/package_info.ts'), diff --git 
a/yarn-project/pxe/src/entrypoints/server/utils.ts b/yarn-project/pxe/src/entrypoints/server/utils.ts index 8d157bdb1d51..2596e3795db1 100644 --- a/yarn-project/pxe/src/entrypoints/server/utils.ts +++ b/yarn-project/pxe/src/entrypoints/server/utils.ts @@ -9,7 +9,7 @@ import type { AztecNode, PrivateKernelProver } from '@aztec/stdlib/interfaces/cl import type { PXEServiceConfig } from '../../config/index.js'; import { PXEService } from '../../pxe_service/pxe_service.js'; -import { PXE_DATA_SCHEMA_VERSION } from './index.js'; +import { PXE_DATA_SCHEMA_VERSION } from '../../storage/index.js'; /** * Create and start an PXEService instance with the given AztecNode. diff --git a/yarn-project/pxe/src/kernel_prover/index.ts b/yarn-project/pxe/src/kernel_prover/index.ts deleted file mode 100644 index 94bc7f6892de..000000000000 --- a/yarn-project/pxe/src/kernel_prover/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from './kernel_prover.js'; -export * from './proving_data_oracle.js'; diff --git a/yarn-project/pxe/src/kernel_prover/hints/build_private_kernel_reset_private_inputs.ts b/yarn-project/pxe/src/private_kernel/hints/build_private_kernel_reset_private_inputs.ts similarity index 98% rename from yarn-project/pxe/src/kernel_prover/hints/build_private_kernel_reset_private_inputs.ts rename to yarn-project/pxe/src/private_kernel/hints/build_private_kernel_reset_private_inputs.ts index 342c7dbcc948..0bc99cc42b9f 100644 --- a/yarn-project/pxe/src/kernel_prover/hints/build_private_kernel_reset_private_inputs.ts +++ b/yarn-project/pxe/src/private_kernel/hints/build_private_kernel_reset_private_inputs.ts @@ -43,7 +43,7 @@ import { } from '@aztec/stdlib/kernel'; import { type PrivateCallExecutionResult, collectNested } from '@aztec/stdlib/tx'; -import type { ProvingDataOracle } from '../proving_data_oracle.js'; +import type { PrivateKernelOracle } from '../private_kernel_oracle.js'; function collectNestedReadRequests( executionStack: PrivateCallExecutionResult[], @@ -57,7 +57,7 
@@ function collectNestedReadRequests( }); } -function getNullifierMembershipWitnessResolver(oracle: ProvingDataOracle) { +function getNullifierMembershipWitnessResolver(oracle: PrivateKernelOracle) { return async (nullifier: Fr) => { const res = await oracle.getNullifierMembershipWitness(nullifier); if (!res) { @@ -74,7 +74,7 @@ function getNullifierMembershipWitnessResolver(oracle: ProvingDataOracle) { async function getMasterSecretKeysAndAppKeyGenerators( keyValidationRequests: Tuple, - oracle: ProvingDataOracle, + oracle: PrivateKernelOracle, ) { const keysHints = []; for (let i = 0; i < keyValidationRequests.length; ++i) { @@ -144,7 +144,7 @@ export class PrivateKernelResetPrivateInputsBuilder { } } - async build(oracle: ProvingDataOracle, noteHashLeafIndexMap: Map) { + async build(oracle: PrivateKernelOracle, noteHashLeafIndexMap: Map) { if (privateKernelResetDimensionNames.every(name => !this.requestedDimensions[name])) { throw new Error('Reset is not required.'); } diff --git a/yarn-project/pxe/src/kernel_prover/hints/index.ts b/yarn-project/pxe/src/private_kernel/hints/index.ts similarity index 100% rename from yarn-project/pxe/src/kernel_prover/hints/index.ts rename to yarn-project/pxe/src/private_kernel/hints/index.ts diff --git a/yarn-project/pxe/src/private_kernel/index.ts b/yarn-project/pxe/src/private_kernel/index.ts new file mode 100644 index 000000000000..10ab0c372dcf --- /dev/null +++ b/yarn-project/pxe/src/private_kernel/index.ts @@ -0,0 +1,2 @@ +export * from './private_kernel_execution_prover.js'; +export * from './private_kernel_oracle.js'; diff --git a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts b/yarn-project/pxe/src/private_kernel/private_kernel_execution_prover.test.ts similarity index 93% rename from yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts rename to yarn-project/pxe/src/private_kernel/private_kernel_execution_prover.test.ts index 6eae2a21c052..8cf71a052421 100644 --- 
a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts +++ b/yarn-project/pxe/src/private_kernel/private_kernel_execution_prover.test.ts @@ -31,14 +31,14 @@ import { VerificationKey, VerificationKeyAsFields } from '@aztec/stdlib/vks'; import { mock } from 'jest-mock-extended'; -import { KernelProver } from './kernel_prover.js'; -import type { ProvingDataOracle } from './proving_data_oracle.js'; +import { PrivateKernelExecutionProver } from './private_kernel_execution_prover.js'; +import type { PrivateKernelOracle } from './private_kernel_oracle.js'; -describe('Kernel Prover', () => { +describe('Private Kernel Sequencer', () => { let txRequest: TxRequest; - let oracle: ReturnType>; + let oracle: ReturnType>; let proofCreator: ReturnType>; - let prover: KernelProver; + let prover: PrivateKernelExecutionProver; let dependencies: { [name: string]: string[] } = {}; const contractAddress = AztecAddress.fromBigInt(987654n); @@ -132,12 +132,12 @@ describe('Kernel Prover', () => { proofCreator.simulateInit.mockClear(); }; - const prove = (executionResult: PrivateExecutionResult) => prover.prove(txRequest, executionResult); + const prove = (executionResult: PrivateExecutionResult) => prover.proveWithKernels(txRequest, executionResult); beforeEach(async () => { txRequest = makeTxRequest(); - oracle = mock(); + oracle = mock(); // TODO(dbanks12): will need to mock oracle.getNoteMembershipWitness() to test non-transient reads oracle.getVkMembershipWitness.mockResolvedValue(MembershipWitness.random(VK_TREE_HEIGHT)); @@ -159,7 +159,7 @@ describe('Kernel Prover', () => { proofCreator.simulateReset.mockResolvedValue(simulateProofOutput([])); proofCreator.simulateTail.mockResolvedValue(simulateProofOutputFinal([])); - prover = new KernelProver(oracle, proofCreator, true); + prover = new PrivateKernelExecutionProver(oracle, proofCreator, true); }); it('should create proofs in correct order', async () => { diff --git a/yarn-project/pxe/src/kernel_prover/kernel_prover.ts 
b/yarn-project/pxe/src/private_kernel/private_kernel_execution_prover.ts similarity index 76% rename from yarn-project/pxe/src/kernel_prover/kernel_prover.ts rename to yarn-project/pxe/src/private_kernel/private_kernel_execution_prover.ts index 80f43560cbca..9484772b88e1 100644 --- a/yarn-project/pxe/src/kernel_prover/kernel_prover.ts +++ b/yarn-project/pxe/src/private_kernel/private_kernel_execution_prover.ts @@ -6,7 +6,6 @@ import { assertLength } from '@aztec/foundation/serialize'; import { pushTestData } from '@aztec/foundation/testing'; import { Timer } from '@aztec/foundation/timer'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; -import type { WitnessMap } from '@aztec/noir-types'; import { getProtocolContractLeafAndMembershipWitness, protocolContractTreeRoot } from '@aztec/protocol-contracts'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { computeContractAddressFromInstance } from '@aztec/stdlib/contract'; @@ -14,8 +13,10 @@ import { hashVK } from '@aztec/stdlib/hash'; import type { PrivateKernelProver } from '@aztec/stdlib/interfaces/client'; import { PrivateCallData, + type PrivateExecutionStep, PrivateKernelCircuitPublicInputs, PrivateKernelData, + type PrivateKernelExecutionProofOutput, PrivateKernelInitCircuitPrivateInputs, PrivateKernelInnerCircuitPrivateInputs, type PrivateKernelSimulateOutput, @@ -37,32 +38,32 @@ import { import { VerificationKeyAsFields } from '@aztec/stdlib/vks'; import { PrivateKernelResetPrivateInputsBuilder } from './hints/build_private_kernel_reset_private_inputs.js'; -import type { ProvingDataOracle } from './proving_data_oracle.js'; +import type { PrivateKernelOracle } from './private_kernel_oracle.js'; -const NULL_PROVE_OUTPUT: PrivateKernelSimulateOutput = { +const NULL_SIMULATE_OUTPUT: PrivateKernelSimulateOutput = { publicInputs: PrivateKernelCircuitPublicInputs.empty(), verificationKey: VerificationKeyAsFields.makeEmpty(CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS), 
outputWitness: new Map(), bytecode: Buffer.from([]), }; -export type ProvingConfig = { +export interface PrivateKernelExecutionProverConfig { simulate: boolean; skipFeeEnforcement: boolean; - profile: boolean; -}; + profileMode: 'gates' | 'execution-steps' | 'full' | 'none'; +} /** - * The KernelProver class is responsible for generating kernel proofs. - * It takes a transaction request, its signature, and the simulation result as inputs, and outputs a proof - * along with output notes. The class interacts with a ProvingDataOracle to fetch membership witnesses and - * constructs private call data based on the execution results. + * The PrivateKernelSequencer class is responsible for taking a transaction request and sequencing the + * the execution of the private functions within, sequenced with private kernel "glue" to check protocol rules. + * The result can be a client IVC proof of the private transaction portion, or just a simulation that can e.g. + * inform state tree updates. */ -export class KernelProver { - private log = createLogger('pxe:kernel-prover'); +export class PrivateKernelExecutionProver { + private log = createLogger('pxe:private-kernel-execution-prover'); constructor( - private oracle: ProvingDataOracle, + private oracle: PrivateKernelOracle, private proofCreator: PrivateKernelProver, private fakeProofs = false, ) {} @@ -77,19 +78,18 @@ export class KernelProver { * @param executionResult - The execution result object containing nested executions and preimages. * @param profile - Set true to profile the gate count for each circuit * @returns A Promise that resolves to a KernelProverOutput object containing proof, public inputs, and output notes. 
- * TODO(#7368) this should be refactored to not recreate the ACIR bytecode now that it operates on a program stack */ - async prove( + async proveWithKernels( txRequest: TxRequest, executionResult: PrivateExecutionResult, - { simulate, skipFeeEnforcement, profile }: ProvingConfig = { + { simulate, skipFeeEnforcement, profileMode }: PrivateKernelExecutionProverConfig = { simulate: false, skipFeeEnforcement: false, - profile: false, + profileMode: 'none', }, - ): Promise> { + ): Promise> { const skipProofGeneration = this.fakeProofs || simulate; - const generateWitnesses = !skipProofGeneration || profile; + const generateWitnesses = !skipProofGeneration || profileMode !== 'none'; const timer = new Timer(); @@ -98,15 +98,9 @@ export class KernelProver { const executionStack = [executionResult.entrypoint]; let firstIteration = true; - let output = NULL_PROVE_OUTPUT; + let output = NULL_SIMULATE_OUTPUT; - const gateCounts: { circuitName: string; gateCount: number }[] = []; - const addGateCount = async (circuitName: string, bytecode: Buffer) => { - const gateCount = (await this.proofCreator.computeGateCountForCircuit(bytecode, circuitName)) as number; - gateCounts.push({ circuitName, gateCount }); - - this.log.debug(`Gate count for ${circuitName} - ${gateCount}`); - }; + const executionSteps: PrivateExecutionStep[] = []; const noteHashLeafIndexMap = collectNoteHashLeafIndexMap(executionResult); const noteHashNullifierCounterMap = collectNoteHashNullifierCounterMap(executionResult); @@ -114,9 +108,6 @@ export class KernelProver { const hasPublicCalls = enqueuedPublicFunctions.length > 0 || !collectPublicTeardownFunctionCall(executionResult).isEmpty(); const validationRequestsSplitCounter = hasPublicCalls ? 
getFinalMinRevertibleSideEffectCounter(executionResult) : 0; - // vector of gzipped bincode acirs - const acirs: Buffer[] = []; - const witnessStack: WitnessMap[] = []; while (executionStack.length) { if (!firstIteration) { @@ -128,16 +119,14 @@ export class KernelProver { ); while (resetBuilder.needsReset()) { const privateInputs = await resetBuilder.build(this.oracle, noteHashLeafIndexMap); - output = generateWitnesses - ? await this.proofCreator.generateResetOutput(privateInputs) - : await this.proofCreator.simulateReset(privateInputs); - // TODO(#7368) consider refactoring this redundant bytecode pushing - acirs.push(output.bytecode); - witnessStack.push(output.outputWitness); - if (profile) { - await addGateCount('private_kernel_reset', output.bytecode); - } - + output = simulate + ? await this.proofCreator.simulateReset(privateInputs) + : await this.proofCreator.generateResetOutput(privateInputs); + executionSteps.push({ + functionName: 'private_kernel_reset', + bytecode: output.bytecode, + witness: output.outputWitness, + }); resetBuilder = new PrivateKernelResetPrivateInputsBuilder( output, executionStack, @@ -156,13 +145,11 @@ export class KernelProver { currentExecution.publicInputs.callContext.functionSelector, ); - // TODO(#7368): This used to be associated with getDebugFunctionName - // TODO(#7368): Is there any way to use this with client IVC proving? - acirs.push(currentExecution.acir); - witnessStack.push(currentExecution.partialWitness); - if (profile) { - await addGateCount(functionName as string, currentExecution.acir); - } + executionSteps.push({ + functionName: functionName!, + bytecode: currentExecution.acir, + witness: currentExecution.partialWitness, + }); const privateCallData = await this.createPrivateCallData(currentExecution); @@ -185,11 +172,11 @@ export class KernelProver { ? 
await this.proofCreator.generateInitOutput(proofInput) : await this.proofCreator.simulateInit(proofInput); - acirs.push(output.bytecode); - witnessStack.push(output.outputWitness); - if (profile) { - await addGateCount('private_kernel_init', output.bytecode); - } + executionSteps.push({ + functionName: 'private_kernel_init', + bytecode: output.bytecode, + witness: output.outputWitness, + }); } else { const previousVkMembershipWitness = await this.oracle.getVkMembershipWitness(output.verificationKey); const previousKernelData = new PrivateKernelData( @@ -206,11 +193,11 @@ export class KernelProver { ? await this.proofCreator.generateInnerOutput(proofInput) : await this.proofCreator.simulateInner(proofInput); - acirs.push(output.bytecode); - witnessStack.push(output.outputWitness); - if (profile) { - await addGateCount('private_kernel_inner', output.bytecode); - } + executionSteps.push({ + functionName: 'private_kernel_inner', + bytecode: output.bytecode, + witness: output.outputWitness, + }); } firstIteration = false; } @@ -228,11 +215,11 @@ export class KernelProver { ? await this.proofCreator.generateResetOutput(privateInputs) : await this.proofCreator.simulateReset(privateInputs); - acirs.push(output.bytecode); - witnessStack.push(output.outputWitness); - if (profile) { - await addGateCount('private_kernel_reset', output.bytecode); - } + executionSteps.push({ + functionName: 'private_kernel_reset', + bytecode: output.bytecode, + witness: output.outputWitness, + }); resetBuilder = new PrivateKernelResetPrivateInputsBuilder( output, @@ -269,26 +256,44 @@ export class KernelProver { ? 
await this.proofCreator.generateTailOutput(privateInputs) : await this.proofCreator.simulateTail(privateInputs); - acirs.push(tailOutput.bytecode); - witnessStack.push(tailOutput.outputWitness); - if (profile) { - await addGateCount('private_kernel_tail', tailOutput.bytecode); - tailOutput.profileResult = { gateCounts }; + executionSteps.push({ + functionName: 'private_kernel_tail', + bytecode: tailOutput.bytecode, + witness: tailOutput.outputWitness, + }); + + if (profileMode == 'gates' || profileMode == 'full') { + for (const entry of executionSteps) { + const gateCount = await this.proofCreator.computeGateCountForCircuit(entry.bytecode, entry.functionName); + entry.gateCount = gateCount; + } + } + if (profileMode === 'gates') { + for (const entry of executionSteps) { + // These buffers are often a few megabytes in size - prevent accidentally serializing them if not requested. + entry.bytecode = Buffer.from([]); + entry.witness = new Map(); + } } if (generateWitnesses) { this.log.info(`Private kernel witness generation took ${timer.ms()}ms`); } + let clientIvcProof: ClientIvcProof; // TODO(#7368) how do we 'bincode' encode these inputs? 
if (!skipProofGeneration) { - const ivcProof = await this.proofCreator.createClientIvcProof(acirs, witnessStack); - tailOutput.clientIvcProof = ivcProof; + clientIvcProof = await this.proofCreator.createClientIvcProof(executionSteps); } else { - tailOutput.clientIvcProof = ClientIvcProof.random(); + clientIvcProof = ClientIvcProof.random(); } - return tailOutput; + return { + publicInputs: tailOutput.publicInputs, + executionSteps, + clientIvcProof, + verificationKey: tailOutput.verificationKey, + }; } private async createPrivateCallData({ publicInputs, vk: vkAsBuffer }: PrivateCallExecutionResult) { diff --git a/yarn-project/pxe/src/kernel_prover/proving_data_oracle.ts b/yarn-project/pxe/src/private_kernel/private_kernel_oracle.ts similarity index 54% rename from yarn-project/pxe/src/kernel_prover/proving_data_oracle.ts rename to yarn-project/pxe/src/private_kernel/private_kernel_oracle.ts index bf10b6ad9776..170674df1f64 100644 --- a/yarn-project/pxe/src/kernel_prover/proving_data_oracle.ts +++ b/yarn-project/pxe/src/private_kernel/private_kernel_oracle.ts @@ -1,6 +1,6 @@ -import type { FUNCTION_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT, VK_TREE_HEIGHT } from '@aztec/constants'; +import { FUNCTION_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT, VK_TREE_HEIGHT } from '@aztec/constants'; import type { Fr, GrumpkinScalar, Point } from '@aztec/foundation/fields'; -import type { MembershipWitness } from '@aztec/foundation/trees'; +import { MembershipWitness } from '@aztec/foundation/trees'; import type { FunctionSelector } from '@aztec/stdlib/abi'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import { UpdatedClassIdHints } from '@aztec/stdlib/kernel'; @@ -9,10 +9,10 @@ import type { NullifierMembershipWitness } from '@aztec/stdlib/trees'; import type { VerificationKeyAsFields } from '@aztec/stdlib/vks'; /** - * Provides functionality to fetch membership witnesses for verification keys, - * contract addresses, and function selectors in their respective merkle trees. 
+ * Provides functionality needed by the private kernel for interacting with our state trees. + * This is either PrivateKernelOracleImpl, or a mocked test implementation. */ -export interface ProvingDataOracle { +export interface PrivateKernelOracle { /** Retrieves the preimage of a contract address from the registered contract instances db. */ getContractAddressPreimage(address: AztecAddress): Promise<{ saltedInitializationHash: Fr; @@ -27,13 +27,7 @@ export interface ProvingDataOracle { ): Promise<{ artifactHash: Fr; publicBytecodeCommitment: Fr; privateFunctionsRoot: Fr }>; /** - * Retrieve the function membership witness for the given contract class and function selector. - * The function membership witness represents a proof that the function belongs to the specified contract. - * Throws an error if the contract address or function selector is unknown. - * - * @param contractClassId - The id of the class. - * @param selector - The function selector. - * @returns A promise that resolves with the MembershipWitness instance for the specified contract's function. + * Returns a membership witness with the sibling path and leaf index in our private functions tree. */ getFunctionMembershipWitness( contractClassId: Fr, @@ -41,30 +35,21 @@ export interface ProvingDataOracle { ): Promise>; /** - * Retrieve the membership witness corresponding to a verification key. - * This function currently returns a random membership witness of the specified height, - * which is a placeholder implementation until a concrete membership witness calculation - * is implemented. - * - * @param vk - The VerificationKey for which the membership witness is needed. - * @returns A Promise that resolves to the MembershipWitness instance. + * Returns a membership witness with the sibling path and leaf index in our protocol VK indexed merkle tree. + * Used to validate the previous kernel's verification key. 
*/ getVkMembershipWitness(vk: VerificationKeyAsFields): Promise>; /** - * Get the note membership witness for a note in the note hash tree at the given leaf index. - * - * @param leafIndex - The leaf index of the note in the note hash tree. - * @returns the MembershipWitness for the note. - */ - getNoteHashMembershipWitness(leafIndex: bigint): Promise>; + * Returns a membership witness with the sibling path and leaf index in our private function indexed merkle tree. + */ getNoteHashMembershipWitness(leafIndex: bigint): Promise>; + /** + * Returns a membership witness with the sibling path and leaf index in our nullifier indexed merkle tree. + */ getNullifierMembershipWitness(nullifier: Fr): Promise; - /** - * Get the root of the note hash tree. - * - * @returns the root of the note hash tree. + * Returns the root of our note hash merkle tree. */ getNoteHashTreeRoot(): Promise; @@ -77,7 +62,10 @@ export interface ProvingDataOracle { */ getMasterSecretKey(masterPublicKey: Point): Promise; + /** Use debug data to get the function name corresponding to a selector. */ getDebugFunctionName(contractAddress: AztecAddress, selector: FunctionSelector): Promise; + /** Returns a membership witness and leaf index to our public data indexed merkle tree, + * along with an associated SharedMutable containing the class ID to update. 
*/ getUpdatedClassIdHints(contractAddress: AztecAddress): Promise; } diff --git a/yarn-project/pxe/src/kernel_oracle/index.ts b/yarn-project/pxe/src/private_kernel/private_kernel_oracle_impl.ts similarity index 93% rename from yarn-project/pxe/src/kernel_oracle/index.ts rename to yarn-project/pxe/src/private_kernel/private_kernel_oracle_impl.ts index aad67ddb93a2..3a0da8a20d66 100644 --- a/yarn-project/pxe/src/kernel_oracle/index.ts +++ b/yarn-project/pxe/src/private_kernel/private_kernel_oracle_impl.ts @@ -1,4 +1,4 @@ -import { type NOTE_HASH_TREE_HEIGHT, PUBLIC_DATA_TREE_HEIGHT, VK_TREE_HEIGHT } from '@aztec/constants'; +import { NOTE_HASH_TREE_HEIGHT, PUBLIC_DATA_TREE_HEIGHT, VK_TREE_HEIGHT } from '@aztec/constants'; import type { Fr, GrumpkinScalar, Point } from '@aztec/foundation/fields'; import { createLogger } from '@aztec/foundation/log'; import type { Tuple } from '@aztec/foundation/serialize'; @@ -17,15 +17,16 @@ import { SharedMutableValues, SharedMutableValuesWithHash } from '@aztec/stdlib/ import type { NullifierMembershipWitness } from '@aztec/stdlib/trees'; import type { VerificationKeyAsFields } from '@aztec/stdlib/vks'; -import type { ContractDataProvider } from '../storage/contract_data_provider/contract_data_provider.js'; -import type { ProvingDataOracle } from './../kernel_prover/proving_data_oracle.js'; +import type { ContractDataProvider } from '../storage/index.js'; +import type { PrivateKernelOracle } from './private_kernel_oracle.js'; // TODO: Block number should not be "latest". // It should be fixed at the time the proof is being simulated. I.e., it should be the same as the value defined in the constant data. /** * A data oracle that provides information needed for simulating a transaction. 
*/ -export class KernelOracle implements ProvingDataOracle { + +export class PrivateKernelOracleImpl implements PrivateKernelOracle { constructor( private contractDataProvider: ContractDataProvider, private keyStore: KeyStore, diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index bccef6977e6a..a2ba436e4ee2 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -48,7 +48,7 @@ import type { PXEInfo, PrivateKernelProver, } from '@aztec/stdlib/interfaces/client'; -import { type PrivateKernelSimulateOutput, PrivateKernelTailCircuitPublicInputs } from '@aztec/stdlib/kernel'; +import type { PrivateKernelExecutionProofOutput, PrivateKernelTailCircuitPublicInputs } from '@aztec/stdlib/kernel'; import { computeAddressSecret } from '@aztec/stdlib/keys'; import type { LogFilter } from '@aztec/stdlib/logs'; import { getNonNullifiedL1ToL2MessageWitness } from '@aztec/stdlib/messaging'; @@ -57,11 +57,12 @@ import { MerkleTreeId } from '@aztec/stdlib/trees'; import { PrivateExecutionResult, PrivateSimulationResult, - type PublicSimulationOutput, - type Tx, + PublicSimulationOutput, + Tx, type TxEffect, - type TxExecutionRequest, + TxExecutionRequest, type TxHash, + TxProfileResult, TxProvingResult, type TxReceipt, TxSimulationResult, @@ -71,8 +72,11 @@ import { inspect } from 'util'; import type { PXEServiceConfig } from '../config/index.js'; import { getPackageInfo } from '../config/package_info.js'; -import { KernelOracle } from '../kernel_oracle/index.js'; -import { KernelProver, type ProvingConfig } from '../kernel_prover/kernel_prover.js'; +import { + PrivateKernelExecutionProver, + type PrivateKernelExecutionProverConfig, +} from '../private_kernel/private_kernel_execution_prover.js'; +import { PrivateKernelOracleImpl } from '../private_kernel/private_kernel_oracle_impl.js'; import { PXEOracleInterface } from 
'../pxe_oracle_interface/pxe_oracle_interface.js'; import { AddressDataProvider } from '../storage/address_data_provider/address_data_provider.js'; import { AuthWitnessDataProvider } from '../storage/auth_witness_data_provider/auth_witness_data_provider.js'; @@ -493,7 +497,7 @@ export class PXEService implements PXE { { simulate: false, skipFeeEnforcement: false, - profile: false, + profileMode: 'none', }, ); return new TxProvingResult(privateExecutionResult, publicInputs, clientIvcProof!); @@ -502,6 +506,47 @@ export class PXEService implements PXE { } }); } + public profileTx( + txRequest: TxExecutionRequest, + profileMode: 'full' | 'execution-steps' | 'gates', + msgSender?: AztecAddress, + ): Promise { + // We disable concurrent profiles for consistency with simulateTx. + return this.#putInJobQueue(async () => { + try { + const txInfo = { + origin: txRequest.origin, + functionSelector: txRequest.functionSelector, + simulatePublic: false, + msgSender, + chainId: txRequest.txContext.chainId, + version: txRequest.txContext.version, + authWitnesses: txRequest.authWitnesses.map(w => w.requestHash), + }; + this.log.info( + `Profiling transaction execution request to ${txRequest.functionSelector} at ${txRequest.origin}`, + txInfo, + ); + await this.synchronizer.sync(); + const privateExecutionResult = await this.#executePrivate(txRequest, msgSender); + + const { executionSteps } = await this.#prove(txRequest, this.proofCreator, privateExecutionResult, { + simulate: true, + skipFeeEnforcement: false, + profileMode, + }); + + return new TxProfileResult(executionSteps); + } catch (err: any) { + throw this.contextualizeError( + err, + inspect(txRequest), + `profileMode=${profileMode}`, + `msgSender=${msgSender?.toString() ?? 
'undefined'}`, + ); + } + }); + } // TODO(#7456) Prevent msgSender being defined here for the first call public simulateTx( @@ -510,7 +555,6 @@ export class PXEService implements PXE { msgSender: AztecAddress | undefined = undefined, skipTxValidation: boolean = false, skipFeeEnforcement: boolean = false, - profile: boolean = false, scopes?: AztecAddress[], ): Promise { // We disable concurrent simulations since those might execute oracles which read and write to the PXE stores (e.g. @@ -535,16 +579,11 @@ export class PXEService implements PXE { await this.synchronizer.sync(); const privateExecutionResult = await this.#executePrivate(txRequest, msgSender, scopes); - const { publicInputs, profileResult } = await this.#prove( - txRequest, - this.proofCreator, - privateExecutionResult, - { - simulate: !profile, - skipFeeEnforcement, - profile, - }, - ); + const { publicInputs } = await this.#prove(txRequest, this.proofCreator, privateExecutionResult, { + simulate: true, + skipFeeEnforcement, + profileMode: 'none', + }); const privateSimulationResult = new PrivateSimulationResult(privateExecutionResult, publicInputs); const simulatedTx = privateSimulationResult.toSimulatedTx(); @@ -564,7 +603,6 @@ export class PXEService implements PXE { this.log.info(`Simulation completed for ${txHash.toString()} in ${timer.ms()}ms`, { txHash, ...txInfo, - ...(profileResult ? { gateCounts: profileResult.gateCounts } : {}), ...(publicOutput ? { gasUsed: publicOutput.gasUsed, @@ -574,11 +612,7 @@ export class PXEService implements PXE { : {}), }); - return TxSimulationResult.fromPrivateSimulationResultAndPublicOutput( - privateSimulationResult, - publicOutput, - profileResult, - ); + return TxSimulationResult.fromPrivateSimulationResultAndPublicOutput(privateSimulationResult, publicOutput); } catch (err: any) { throw this.contextualizeError( err, @@ -586,7 +620,6 @@ export class PXEService implements PXE { `simulatePublic=${simulatePublic}`, `msgSender=${msgSender?.toString() ?? 
'undefined'}`, `skipTxValidation=${skipTxValidation}`, - `profile=${profile}`, `scopes=${scopes?.map(s => s.toString()).join(', ') ?? 'undefined'}`, ); } @@ -741,31 +774,13 @@ export class PXEService implements PXE { this.log.verbose(`Registered protocol contracts in pxe`, registered); } - /** - * Retrieves the simulation parameters required to run an ACIR simulation. - * This includes the contract address, function artifact, and historical tree roots. - * - * @param execRequest - The transaction request object containing details of the contract call. - * @returns An object containing the contract address, function artifact, and historical tree roots. - */ - #getSimulationParameters(execRequest: FunctionCall | TxExecutionRequest) { - const contractAddress = (execRequest as FunctionCall).to ?? (execRequest as TxExecutionRequest).origin; - const functionSelector = - (execRequest as FunctionCall).selector ?? (execRequest as TxExecutionRequest).functionSelector; - - return { - contractAddress, - functionSelector, - }; - } - async #executePrivate( txRequest: TxExecutionRequest, msgSender?: AztecAddress, scopes?: AztecAddress[], ): Promise { // TODO - Pause syncing while simulating. - const { contractAddress, functionSelector } = this.#getSimulationParameters(txRequest); + const { origin: contractAddress, functionSelector } = txRequest; try { const result = await this.simulator.run(txRequest, contractAddress, functionSelector, msgSender, scopes); @@ -789,7 +804,7 @@ export class PXEService implements PXE { * @returns The simulation result containing the outputs of the unconstrained function. 
*/ async #simulateUnconstrained(execRequest: FunctionCall, scopes?: AztecAddress[]) { - const { contractAddress, functionSelector } = this.#getSimulationParameters(execRequest); + const { to: contractAddress, selector: functionSelector } = execRequest; this.log.debug('Executing unconstrained simulator...'); try { @@ -846,19 +861,13 @@ export class PXEService implements PXE { txExecutionRequest: TxExecutionRequest, proofCreator: PrivateKernelProver, privateExecutionResult: PrivateExecutionResult, - { simulate, skipFeeEnforcement, profile }: ProvingConfig, - ): Promise> { - // use the block the tx was simulated against - const block = - privateExecutionResult.entrypoint.publicInputs.historicalHeader.globalVariables.blockNumber.toNumber(); - const kernelOracle = new KernelOracle(this.contractDataProvider, this.keyStore, this.node, block); - const kernelProver = new KernelProver(kernelOracle, proofCreator, !this.proverEnabled); - this.log.debug(`Executing kernel prover (simulate: ${simulate}, profile: ${profile})...`); - return await kernelProver.prove(txExecutionRequest.toTxRequest(), privateExecutionResult, { - simulate, - skipFeeEnforcement, - profile, - }); + config: PrivateKernelExecutionProverConfig, + ): Promise> { + const block = privateExecutionResult.getSimulationBlockNumber(); + const kernelOracle = new PrivateKernelOracleImpl(this.contractDataProvider, this.keyStore, this.node, block); + const kernelTraceProver = new PrivateKernelExecutionProver(kernelOracle, proofCreator, !this.proverEnabled); + this.log.debug(`Executing kernel trace prover (${JSON.stringify(config)})...`); + return await kernelTraceProver.proveWithKernels(txExecutionRequest.toTxRequest(), privateExecutionResult, config); } async #isContractClassPubliclyRegistered(id: Fr): Promise { diff --git a/yarn-project/pxe/src/storage/contract_data_provider/private_functions_tree.ts b/yarn-project/pxe/src/storage/contract_data_provider/private_functions_tree.ts index bf7b31b2a1aa..45a55580c1ac 100644 
--- a/yarn-project/pxe/src/storage/contract_data_provider/private_functions_tree.ts +++ b/yarn-project/pxe/src/storage/contract_data_provider/private_functions_tree.ts @@ -2,7 +2,7 @@ import { FUNCTION_TREE_HEIGHT } from '@aztec/constants'; import { Fr } from '@aztec/foundation/fields'; import { assertLength } from '@aztec/foundation/serialize'; import { MembershipWitness, type MerkleTree } from '@aztec/foundation/trees'; -import { type ContractArtifact, FunctionSelector } from '@aztec/stdlib/abi'; +import { type ContractArtifact, type FunctionArtifact, FunctionSelector } from '@aztec/stdlib/abi'; import { type ContractClassWithId, computePrivateFunctionLeaf, @@ -34,7 +34,7 @@ export class PrivateFunctionsTree { * @param selector - The function selector. * @returns The artifact object containing relevant information about the targeted function. */ - public async getFunctionArtifact(selector: FunctionSelector) { + public async getFunctionArtifact(selector: FunctionSelector): Promise { const functionsAndSelectors = await Promise.all( this.artifact.functions.map(async f => ({ f, diff --git a/yarn-project/pxe/src/storage/index.ts b/yarn-project/pxe/src/storage/index.ts index be6fb54e6a57..3d1110efc5a9 100644 --- a/yarn-project/pxe/src/storage/index.ts +++ b/yarn-project/pxe/src/storage/index.ts @@ -6,5 +6,4 @@ export * from './note_data_provider/index.js'; export * from './sync_data_provider/index.js'; export * from './tagging_data_provider/index.js'; export * from './data_provider.js'; - -export const PXE_DATA_SCHEMA_VERSION = 2; +export * from './metadata.js'; diff --git a/yarn-project/pxe/src/storage/metadata.ts b/yarn-project/pxe/src/storage/metadata.ts new file mode 100644 index 000000000000..81d4682e0db0 --- /dev/null +++ b/yarn-project/pxe/src/storage/metadata.ts @@ -0,0 +1 @@ +export const PXE_DATA_SCHEMA_VERSION = 2; diff --git a/yarn-project/simulator/src/private/simulator.ts b/yarn-project/simulator/src/private/simulator.ts index 8a80da482125..6f26c1f25083 
100644 --- a/yarn-project/simulator/src/private/simulator.ts +++ b/yarn-project/simulator/src/private/simulator.ts @@ -1,6 +1,6 @@ import { Fr } from '@aztec/foundation/fields'; import { type Logger, createLogger } from '@aztec/foundation/log'; -import type { FunctionCall } from '@aztec/stdlib/abi'; +import type { AbiDecoded, FunctionCall } from '@aztec/stdlib/abi'; import { FunctionSelector, FunctionType } from '@aztec/stdlib/abi'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { CallContext, PrivateExecutionResult, TxExecutionRequest } from '@aztec/stdlib/tx'; @@ -114,7 +114,7 @@ export class AcirSimulator { contractAddress: AztecAddress, selector: FunctionSelector, scopes?: AztecAddress[], - ) { + ): Promise { await verifyCurrentClassId(contractAddress, this.executionDataProvider); const entryPointArtifact = await this.executionDataProvider.getFunctionArtifact(contractAddress, selector); diff --git a/yarn-project/stdlib/src/database-version/version_manager.ts b/yarn-project/stdlib/src/database-version/version_manager.ts index 214afd5b3d1d..83a294077077 100644 --- a/yarn-project/stdlib/src/database-version/version_manager.ts +++ b/yarn-project/stdlib/src/database-version/version_manager.ts @@ -3,6 +3,7 @@ import { jsonParseWithSchemaSync, jsonStringify } from '@aztec/foundation/json-r import { createLogger } from '@aztec/foundation/log'; import fs from 'fs/promises'; +import { inspect } from 'node:util'; import { join } from 'path'; import { z } from 'zod'; @@ -59,6 +60,15 @@ export class DatabaseVersion { .transform(({ schemaVersion, rollupAddress }) => new DatabaseVersion(schemaVersion, rollupAddress)); } + /** Allows for better introspection. 
*/ + public [inspect.custom](): string { + return this.toString(); + } + + public toString(): string { + return this.schemaVersion.toString(); + } + /** * Returns an empty instance */ diff --git a/yarn-project/stdlib/src/interfaces/private_kernel_prover.ts b/yarn-project/stdlib/src/interfaces/private_kernel_prover.ts index 925e308db36e..b71ecbd92b07 100644 --- a/yarn-project/stdlib/src/interfaces/private_kernel_prover.ts +++ b/yarn-project/stdlib/src/interfaces/private_kernel_prover.ts @@ -1,6 +1,5 @@ -import type { WitnessMap } from '@aztec/noir-acvm_js'; - import type { + PrivateExecutionStep, PrivateKernelCircuitPublicInputs, PrivateKernelInitCircuitPrivateInputs, PrivateKernelInnerCircuitPrivateInputs, @@ -101,7 +100,7 @@ export interface PrivateKernelProver { * @param acirs The program bytecode. * @param witnessStack The witnessses for each program bytecode. */ - createClientIvcProof(acirs: Buffer[], witnessStack: WitnessMap[]): Promise; + createClientIvcProof(executionSteps: PrivateExecutionStep[]): Promise; /** * Compute the gate count for a given circuit. 
diff --git a/yarn-project/stdlib/src/interfaces/pxe.test.ts b/yarn-project/stdlib/src/interfaces/pxe.test.ts index 4ada1acf6d60..15291dded0b2 100644 --- a/yarn-project/stdlib/src/interfaces/pxe.test.ts +++ b/yarn-project/stdlib/src/interfaces/pxe.test.ts @@ -36,7 +36,9 @@ import { UniqueNote } from '../note/index.js'; import type { NotesFilter } from '../note/notes_filter.js'; import { ClientIvcProof } from '../proofs/client_ivc_proof.js'; import { getTokenContractArtifact } from '../tests/fixtures.js'; -import { PrivateExecutionResult, Tx, TxHash, TxProvingResult, TxReceipt, TxSimulationResult } from '../tx/index.js'; +import { PrivateExecutionResult, Tx, TxHash, TxReceipt, TxSimulationResult } from '../tx/index.js'; +import { TxProfileResult } from '../tx/profiled_tx.js'; +import { TxProvingResult } from '../tx/proven_tx.js'; import { TxEffect } from '../tx/tx_effect.js'; import { TxExecutionRequest } from '../tx/tx_execution_request.js'; import type { GetContractClassLogsResponse, GetPublicLogsResponse } from './get_logs_response.js'; @@ -149,6 +151,11 @@ describe('PXESchema', () => { expect(result).toEqual([address]); }); + it('profileTx', async () => { + const result = await context.client.profileTx(await TxExecutionRequest.random(), 'gates'); + expect(result).toBeInstanceOf(TxProfileResult); + }); + it('proveTx', async () => { const result = await context.client.proveTx( await TxExecutionRequest.random(), @@ -158,15 +165,7 @@ describe('PXESchema', () => { }); it('simulateTx(all)', async () => { - const result = await context.client.simulateTx( - await TxExecutionRequest.random(), - true, - address, - false, - true, - false, - [], - ); + const result = await context.client.simulateTx(await TxExecutionRequest.random(), true, address, false, true, []); expect(result).toBeInstanceOf(TxSimulationResult); }); @@ -183,7 +182,6 @@ describe('PXESchema', () => { undefined, undefined, undefined, - undefined, ); expect(result).toBeInstanceOf(TxSimulationResult); }); @@ 
-381,6 +379,18 @@ class MockPXE implements PXE { getContracts(): Promise { return Promise.resolve([this.address]); } + profileTx( + txRequest: TxExecutionRequest, + profileMode: 'gates' | 'full' | 'execution-steps' | 'none', + msgSender?: AztecAddress, + ): Promise { + expect(txRequest).toBeInstanceOf(TxExecutionRequest); + expect(profileMode).toMatch(/gates|debug/); + if (msgSender) { + expect(msgSender).toBeInstanceOf(AztecAddress); + } + return Promise.resolve(new TxProfileResult([])); + } proveTx(txRequest: TxExecutionRequest, privateExecutionResult: PrivateExecutionResult): Promise { expect(txRequest).toBeInstanceOf(TxExecutionRequest); expect(privateExecutionResult).toBeInstanceOf(PrivateExecutionResult); @@ -394,7 +404,6 @@ class MockPXE implements PXE { msgSender?: AztecAddress | undefined, _skipTxValidation?: boolean | undefined, _enforceFeePayment?: boolean | undefined, - _profile?: boolean | undefined, scopes?: AztecAddress[] | undefined, ): Promise { expect(txRequest).toBeInstanceOf(TxExecutionRequest); diff --git a/yarn-project/stdlib/src/interfaces/pxe.ts b/yarn-project/stdlib/src/interfaces/pxe.ts index 018fbbf6600f..ed6e9d8834b5 100644 --- a/yarn-project/stdlib/src/interfaces/pxe.ts +++ b/yarn-project/stdlib/src/interfaces/pxe.ts @@ -29,15 +29,9 @@ import { type LogFilter, LogFilterSchema } from '../logs/log_filter.js'; import { UniqueNote } from '../note/extended_note.js'; import { type NotesFilter, NotesFilterSchema } from '../note/notes_filter.js'; import { AbiDecodedSchema, optional, schemas } from '../schemas/schemas.js'; -import { - PrivateExecutionResult, - Tx, - TxExecutionRequest, - TxHash, - TxProvingResult, - TxReceipt, - TxSimulationResult, -} from '../tx/index.js'; +import { PrivateExecutionResult, Tx, TxExecutionRequest, TxHash, TxReceipt, TxSimulationResult } from '../tx/index.js'; +import { TxProfileResult } from '../tx/profiled_tx.js'; +import { TxProvingResult } from '../tx/proven_tx.js'; import { TxEffect } from 
'../tx/tx_effect.js'; import { type GetContractClassLogsResponse, @@ -166,14 +160,13 @@ export interface PXE { getContracts(): Promise; /** - * Creates a proving result based on the provided preauthenticated execution request and the results - * of executing the private part of the transaction. This will assemble the zero-knowledge proof for the private execution. - * It returns an object that contains the proof and public inputs of the tail circuit, which can be converted into a Tx ready to be sent to the network + * Proves the private portion of a simulated transaction, ready to send to the network + * (where valiators prove the public portion). * * @param txRequest - An authenticated tx request ready for proving * @param privateExecutionResult - The result of the private execution of the transaction - * @returns A transaction ready to be sent to the network for execution. - * @throws If the code for the functions executed in this transaction has not been made available via `addContracts`. + * @returns A result containing the proof and public inputs of the tail circuit. + * @throws If contract code not found, or public simulation reverts. * Also throws if simulatePublic is true and public simulation reverts. */ proveTx(txRequest: TxExecutionRequest, privateExecutionResult: PrivateExecutionResult): Promise; @@ -193,10 +186,10 @@ export interface PXE { * @param simulatePublic - Whether to simulate the public part of the transaction. * @param msgSender - (Optional) The message sender to use for the simulation. * @param skipTxValidation - (Optional) If false, this function throws if the transaction is unable to be included in a block at the current state. - * @param profile - (Optional) If true, will run the private kernel prover with profiling enabled and include the result (gate count) in TxSimulationResult. + * @param skipFeeEnforcement - (Optional) If false, fees are enforced. * @param scopes - (Optional) The accounts whose notes we can access in this call. 
Currently optional and will default to all. * @returns A simulated transaction result object that includes public and private return values. - * @throws If the code for the functions executed in this transaction has not been made available via `addContracts`. + * @throws If the code for the functions executed in this transaction have not been made available via `addContracts`. * Also throws if simulatePublic is true and public simulation reverts. */ simulateTx( @@ -205,10 +198,24 @@ export interface PXE { msgSender?: AztecAddress, skipTxValidation?: boolean, skipFeeEnforcement?: boolean, - profile?: boolean, scopes?: AztecAddress[], ): Promise; + /** + * Profiles a transaction, reporting gate counts (unless disabled) and returns an execution trace. + * + * @param txRequest - An authenticated tx request ready for simulation + * @param msgSender - (Optional) The message sender to use for the simulation. + * @param skipTxValidation - (Optional) If false, this function throws if the transaction is unable to be included in a block at the current state. + * @returns A trace of the program execution with gate counts. + * @throws If the code for the functions executed in this transaction have not been made available via `addContracts`. + */ + profileTx( + txRequest: TxExecutionRequest, + profileMode: 'gates' | 'execution-steps' | 'full', + msgSender?: AztecAddress, + ): Promise; + /** * Sends a transaction to an Aztec node to be broadcasted to the network and mined. * @param tx - The transaction as created via `proveTx`. 
@@ -473,6 +480,14 @@ export const PXESchema: ApiSchemaFor = { updateContract: z.function().args(schemas.AztecAddress, ContractArtifactSchema).returns(z.void()), getContracts: z.function().returns(z.array(schemas.AztecAddress)), proveTx: z.function().args(TxExecutionRequest.schema, PrivateExecutionResult.schema).returns(TxProvingResult.schema), + profileTx: z + .function() + .args( + TxExecutionRequest.schema, + z.union([z.literal('gates'), z.literal('full'), z.literal('execution-steps')]), + optional(schemas.AztecAddress), + ) + .returns(TxProfileResult.schema), simulateTx: z .function() .args( @@ -481,7 +496,6 @@ export const PXESchema: ApiSchemaFor = { optional(schemas.AztecAddress), optional(z.boolean()), optional(z.boolean()), - optional(z.boolean()), optional(z.array(schemas.AztecAddress)), ) .returns(TxSimulationResult.schema), diff --git a/yarn-project/stdlib/src/kernel/index.ts b/yarn-project/stdlib/src/kernel/index.ts index 62f6bbd6c481..c5fed190a2c0 100644 --- a/yarn-project/stdlib/src/kernel/index.ts +++ b/yarn-project/stdlib/src/kernel/index.ts @@ -26,5 +26,5 @@ export * from './note_hash.js'; export * from './private_log_data.js'; export * from './private_call_request.js'; export * from './private_validation_requests.js'; -export * from './private_kernel_prover_profile_result.js'; export * from './private_kernel_simulated_output.js'; +export * from './private_kernel_prover_output.js'; diff --git a/yarn-project/stdlib/src/kernel/private_kernel_prover_output.ts b/yarn-project/stdlib/src/kernel/private_kernel_prover_output.ts new file mode 100644 index 000000000000..ddbc78afdfa3 --- /dev/null +++ b/yarn-project/stdlib/src/kernel/private_kernel_prover_output.ts @@ -0,0 +1,44 @@ +import { bufferSchema, mapSchema } from '@aztec/foundation/schemas'; +import type { WitnessMap } from '@aztec/noir-acvm_js'; + +import { z } from 'zod'; + +import type { ClientIvcProof } from '../proofs/client_ivc_proof.js'; +import type { VerificationKeyAsFields } from 
'../vks/verification_key.js'; +import type { PrivateKernelCircuitPublicInputs } from './private_kernel_circuit_public_inputs.js'; +import type { PrivateKernelTailCircuitPublicInputs } from './private_kernel_tail_circuit_public_inputs.js'; + +export const PrivateExecutionStepSchema = z.object({ + functionName: z.string(), + gateCount: z.number().optional(), + bytecode: bufferSchema, + witness: mapSchema(z.number(), z.string()), +}); + +/** + * Represents either a simulated private kernel circuit or one of our application function circuits. + */ +export interface PrivateExecutionStep { + functionName: string; + gateCount?: number; + bytecode: Buffer; + witness: WitnessMap; +} + +/** Represents the output of proven PrivateKernelSimulateOutput.*/ +export interface PrivateKernelExecutionProofOutput< + PublicInputsType extends PrivateKernelCircuitPublicInputs | PrivateKernelTailCircuitPublicInputs, +> { + /** The public inputs used by the proof generation process. */ + publicInputs: PublicInputsType; + /** The private IVC proof optimized for user devices. It will be consumed by an Aztec prover, + * which recursively verifies it through the "tube" circuit.*/ + clientIvcProof: ClientIvcProof; + verificationKey: VerificationKeyAsFields; + /** + * The trace the clientIvcProof corresponds to. + * A trace of app circuits interleaved with private kernel circuits. + * If simulate is ran with profiling mode, also includes gate counts. 
+ */ + executionSteps: PrivateExecutionStep[]; +} diff --git a/yarn-project/stdlib/src/kernel/private_kernel_prover_profile_result.ts b/yarn-project/stdlib/src/kernel/private_kernel_prover_profile_result.ts deleted file mode 100644 index 0488d4d5bdd2..000000000000 --- a/yarn-project/stdlib/src/kernel/private_kernel_prover_profile_result.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { z } from 'zod'; - -export const PrivateKernelProverProfileResultSchema = z.object({ - gateCounts: z.array(z.object({ circuitName: z.string(), gateCount: z.number() })), -}); - -export type PrivateKernelProverProfileResult = z.infer; diff --git a/yarn-project/stdlib/src/kernel/private_kernel_simulated_output.ts b/yarn-project/stdlib/src/kernel/private_kernel_simulated_output.ts index ca71bca96b39..cf1cf02358b9 100644 --- a/yarn-project/stdlib/src/kernel/private_kernel_simulated_output.ts +++ b/yarn-project/stdlib/src/kernel/private_kernel_simulated_output.ts @@ -1,31 +1,22 @@ import type { WitnessMap } from '@aztec/noir-acvm_js'; -import type { ClientIvcProof } from '../proofs/client_ivc_proof.js'; import type { VerificationKeyAsFields } from '../vks/verification_key.js'; import type { PrivateKernelCircuitPublicInputs } from './private_kernel_circuit_public_inputs.js'; -import type { PrivateKernelProverProfileResult } from './private_kernel_prover_profile_result.js'; import type { PrivateKernelTailCircuitPublicInputs } from './private_kernel_tail_circuit_public_inputs.js'; /** * Represents the output of the proof creation process for init and inner private kernel circuit. * Contains the public inputs required for the init and inner private kernel circuit and the generated proof. */ -export type PrivateKernelSimulateOutput< +export interface PrivateKernelSimulateOutput< PublicInputsType extends PrivateKernelCircuitPublicInputs | PrivateKernelTailCircuitPublicInputs, -> = { +> { /** The public inputs required for the proof generation process. 
*/ publicInputs: PublicInputsType; - - clientIvcProof?: ClientIvcProof; - - verificationKey: VerificationKeyAsFields; - outputWitness: WitnessMap; - + verificationKey: VerificationKeyAsFields; bytecode: Buffer; - - profileResult?: PrivateKernelProverProfileResult; -}; +} /** * Represents the output of the circuit simulation process for init and inner private kernel circuit. diff --git a/yarn-project/stdlib/src/tx/index.ts b/yarn-project/stdlib/src/tx/index.ts index e41502ba4948..e1db823aa1ce 100644 --- a/yarn-project/stdlib/src/tx/index.ts +++ b/yarn-project/stdlib/src/tx/index.ts @@ -16,6 +16,8 @@ export * from './tx_hash.js'; export * from './tx_receipt.js'; export * from './tx.js'; export * from './processed_tx.js'; +export * from './proven_tx.js'; +export * from './profiled_tx.js'; export * from './simulated_tx.js'; export * from './tx_effect.js'; export * from './public_simulation_output.js'; diff --git a/yarn-project/stdlib/src/tx/private_execution_result.ts b/yarn-project/stdlib/src/tx/private_execution_result.ts index c6c61b72cd8b..d2ad175dc4f9 100644 --- a/yarn-project/stdlib/src/tx/private_execution_result.ts +++ b/yarn-project/stdlib/src/tx/private_execution_result.ts @@ -115,6 +115,13 @@ export class PrivateExecutionResult { static async random(nested = 1): Promise { return new PrivateExecutionResult(await PrivateCallExecutionResult.random(nested), Fr.random()); } + + /** + * The block number that this execution was simulated with. 
+ */ + getSimulationBlockNumber(): number { + return this.entrypoint.publicInputs.historicalHeader.globalVariables.blockNumber.toNumber(); + } } /** diff --git a/yarn-project/stdlib/src/tx/profiled_tx.test.ts b/yarn-project/stdlib/src/tx/profiled_tx.test.ts new file mode 100644 index 000000000000..272a5a2ca379 --- /dev/null +++ b/yarn-project/stdlib/src/tx/profiled_tx.test.ts @@ -0,0 +1,11 @@ +import { jsonStringify } from '@aztec/foundation/json-rpc'; + +import { TxProfileResult } from './profiled_tx.js'; + +describe('profiled_tx', () => { + it('convert to and from json', () => { + const profile = TxProfileResult.random(); + const parsed = TxProfileResult.schema.parse(JSON.parse(jsonStringify(profile))); + expect(parsed).toEqual(profile); + }); +}); diff --git a/yarn-project/stdlib/src/tx/profiled_tx.ts b/yarn-project/stdlib/src/tx/profiled_tx.ts new file mode 100644 index 000000000000..8e6a4a86f56b --- /dev/null +++ b/yarn-project/stdlib/src/tx/profiled_tx.ts @@ -0,0 +1,27 @@ +import type { ZodFor } from '@aztec/foundation/schemas'; + +import { z } from 'zod'; + +import { type PrivateExecutionStep, PrivateExecutionStepSchema } from '../kernel/private_kernel_prover_output.js'; + +export class TxProfileResult { + constructor(public executionSteps: PrivateExecutionStep[]) {} + + static get schema(): ZodFor { + return z + .object({ + executionSteps: z.array(PrivateExecutionStepSchema), + }) + .transform(({ executionSteps }) => new TxProfileResult(executionSteps)); + } + + static random(): TxProfileResult { + return new TxProfileResult([ + { + functionName: 'random', + bytecode: Buffer.from('random'), + witness: new Map([[1, 'random']]), + }, + ]); + } +} diff --git a/yarn-project/stdlib/src/tx/proven_tx.test.ts b/yarn-project/stdlib/src/tx/proven_tx.test.ts new file mode 100644 index 000000000000..00f6dc971081 --- /dev/null +++ b/yarn-project/stdlib/src/tx/proven_tx.test.ts @@ -0,0 +1,11 @@ +import { jsonStringify } from '@aztec/foundation/json-rpc'; + +import { 
TxProvingResult } from './proven_tx.js'; + +describe('proven_tx', () => { + it('convert to and from json', async () => { + const tx = await TxProvingResult.random(); + const parsed = TxProvingResult.schema.parse(JSON.parse(jsonStringify(tx))); + expect(parsed).toEqual(tx); + }); +}); diff --git a/yarn-project/stdlib/src/tx/proven_tx.ts b/yarn-project/stdlib/src/tx/proven_tx.ts new file mode 100644 index 000000000000..b05378a49c6d --- /dev/null +++ b/yarn-project/stdlib/src/tx/proven_tx.ts @@ -0,0 +1,58 @@ +import type { FieldsOf } from '@aztec/foundation/types'; + +import { z } from 'zod'; + +import { PrivateKernelTailCircuitPublicInputs } from '../kernel/private_kernel_tail_circuit_public_inputs.js'; +import { ClientIvcProof } from '../proofs/client_ivc_proof.js'; +import { + PrivateExecutionResult, + collectEnqueuedPublicFunctionCalls, + collectPublicTeardownFunctionCall, + collectSortedContractClassLogs, +} from './private_execution_result.js'; +import { Tx } from './tx.js'; + +export class TxProvingResult { + constructor( + public privateExecutionResult: PrivateExecutionResult, + public publicInputs: PrivateKernelTailCircuitPublicInputs, + public clientIvcProof: ClientIvcProof, + ) {} + + toTx(): Tx { + const contractClassLogs = collectSortedContractClassLogs(this.privateExecutionResult); + const enqueuedPublicFunctions = collectEnqueuedPublicFunctionCalls(this.privateExecutionResult); + const teardownPublicFunction = collectPublicTeardownFunctionCall(this.privateExecutionResult); + + const tx = new Tx( + this.publicInputs, + this.clientIvcProof, + contractClassLogs, + enqueuedPublicFunctions, + teardownPublicFunction, + ); + return tx; + } + + static get schema() { + return z + .object({ + privateExecutionResult: PrivateExecutionResult.schema, + publicInputs: PrivateKernelTailCircuitPublicInputs.schema, + clientIvcProof: ClientIvcProof.schema, + }) + .transform(TxProvingResult.from); + } + + static from(fields: FieldsOf) { + return new 
TxProvingResult(fields.privateExecutionResult, fields.publicInputs, fields.clientIvcProof); + } + + static async random() { + return new TxProvingResult( + await PrivateExecutionResult.random(), + PrivateKernelTailCircuitPublicInputs.empty(), + ClientIvcProof.empty(), + ); + } +} diff --git a/yarn-project/stdlib/src/tx/simulated_tx.test.ts b/yarn-project/stdlib/src/tx/simulated_tx.test.ts index c515c983e480..814cef46809f 100644 --- a/yarn-project/stdlib/src/tx/simulated_tx.test.ts +++ b/yarn-project/stdlib/src/tx/simulated_tx.test.ts @@ -1,7 +1,7 @@ import { jsonStringify } from '@aztec/foundation/json-rpc'; import { mockSimulatedTx } from '../tests/mocks.js'; -import { TxProvingResult, TxSimulationResult } from './simulated_tx.js'; +import { TxSimulationResult } from './simulated_tx.js'; describe('simulated_tx', () => { describe('TxSimulationResult', () => { @@ -19,15 +19,4 @@ describe('simulated_tx', () => { expect(TxSimulationResult.schema.parse(JSON.parse(jsonStringify(simulatedTx)))).toEqual(simulatedTx); }); }); - - describe('TxProvingResult', () => { - let tx: TxProvingResult; - beforeEach(async () => { - tx = await TxProvingResult.random(); - }); - - it('convert to and from json', () => { - expect(TxProvingResult.schema.parse(JSON.parse(jsonStringify(tx)))).toEqual(tx); - }); - }); }); diff --git a/yarn-project/stdlib/src/tx/simulated_tx.ts b/yarn-project/stdlib/src/tx/simulated_tx.ts index 7c4d1ae8e881..6fe29b4aefc1 100644 --- a/yarn-project/stdlib/src/tx/simulated_tx.ts +++ b/yarn-project/stdlib/src/tx/simulated_tx.ts @@ -5,10 +5,6 @@ import { z } from 'zod'; import { Gas } from '../gas/gas.js'; import type { GasUsed } from '../gas/gas_used.js'; -import { - type PrivateKernelProverProfileResult, - PrivateKernelProverProfileResultSchema, -} from '../kernel/private_kernel_prover_profile_result.js'; import { PrivateKernelTailCircuitPublicInputs } from '../kernel/private_kernel_tail_circuit_public_inputs.js'; import { ClientIvcProof } from 
'../proofs/client_ivc_proof.js'; import { @@ -47,15 +43,12 @@ export class PrivateSimulationResult { } } -export class TxSimulationResult extends PrivateSimulationResult { +export class TxSimulationResult { constructor( - privateExecutionResult: PrivateExecutionResult, - publicInputs: PrivateKernelTailCircuitPublicInputs, + public privateExecutionResult: PrivateExecutionResult, + public publicInputs: PrivateKernelTailCircuitPublicInputs, public publicOutput?: PublicSimulationOutput, - public profileResult?: PrivateKernelProverProfileResult, - ) { - super(privateExecutionResult, publicInputs); - } + ) {} get gasUsed(): GasUsed { return ( @@ -74,34 +67,22 @@ export class TxSimulationResult extends PrivateSimulationResult { privateExecutionResult: PrivateExecutionResult.schema, publicInputs: PrivateKernelTailCircuitPublicInputs.schema, publicOutput: PublicSimulationOutput.schema.optional(), - profileResult: PrivateKernelProverProfileResultSchema.optional(), }) .transform(TxSimulationResult.from); } static from(fields: Omit, 'gasUsed'>) { - return new TxSimulationResult( - fields.privateExecutionResult, - fields.publicInputs, - fields.publicOutput, - fields.profileResult, - ); - } - - getPublicReturnValues() { - return this.publicOutput ? 
this.publicOutput.publicReturnValues : []; + return new TxSimulationResult(fields.privateExecutionResult, fields.publicInputs, fields.publicOutput); } static fromPrivateSimulationResultAndPublicOutput( privateSimulationResult: PrivateSimulationResult, publicOutput?: PublicSimulationOutput, - profileResult?: PrivateKernelProverProfileResult, ) { return new TxSimulationResult( privateSimulationResult.privateExecutionResult, privateSimulationResult.publicInputs, publicOutput, - profileResult, ); } @@ -112,50 +93,17 @@ export class TxSimulationResult extends PrivateSimulationResult { await PublicSimulationOutput.random(), ); } -} - -export class TxProvingResult { - constructor( - public privateExecutionResult: PrivateExecutionResult, - public publicInputs: PrivateKernelTailCircuitPublicInputs, - public clientIvcProof: ClientIvcProof, - ) {} - toTx(): Tx { - const contractClassLogs = collectSortedContractClassLogs(this.privateExecutionResult); - const enqueuedPublicFunctions = collectEnqueuedPublicFunctionCalls(this.privateExecutionResult); - const teardownPublicFunction = collectPublicTeardownFunctionCall(this.privateExecutionResult); - - const tx = new Tx( - this.publicInputs, - this.clientIvcProof, - contractClassLogs, - enqueuedPublicFunctions, - teardownPublicFunction, - ); - return tx; - } - - static get schema() { - return z - .object({ - privateExecutionResult: PrivateExecutionResult.schema, - publicInputs: PrivateKernelTailCircuitPublicInputs.schema, - clientIvcProof: ClientIvcProof.schema, - }) - .transform(TxProvingResult.from); + getPrivateReturnValues() { + return new PrivateSimulationResult(this.privateExecutionResult, this.publicInputs).getPrivateReturnValues(); } - static from(fields: FieldsOf) { - return new TxProvingResult(fields.privateExecutionResult, fields.publicInputs, fields.clientIvcProof); + toSimulatedTx(): Tx { + return new PrivateSimulationResult(this.privateExecutionResult, this.publicInputs).toSimulatedTx(); } - static async random() { - 
return new TxProvingResult( - await PrivateExecutionResult.random(), - PrivateKernelTailCircuitPublicInputs.empty(), - ClientIvcProof.empty(), - ); + getPublicReturnValues() { + return this.publicOutput ? this.publicOutput.publicReturnValues : []; } } diff --git a/yarn-project/txe/src/txe_service/txe_service.ts b/yarn-project/txe/src/txe_service/txe_service.ts index 41bee41fca7f..e14568425c29 100644 --- a/yarn-project/txe/src/txe_service/txe_service.ts +++ b/yarn-project/txe/src/txe_service/txe_service.ts @@ -550,7 +550,8 @@ export class TXEService { } async getLogByTag(tag: ForeignCallSingle) { - const log = await this.typedOracle.getLogByTag(fromSingle(tag)); + // TODO(AD): this was warning that getLogByTag did not return a promise. + const log = await Promise.resolve(this.typedOracle.getLogByTag(fromSingle(tag))); if (log == null) { return toForeignCallResult([toSingle(Fr.ZERO), ...LogWithTxData.noirSerializationOfEmpty().map(toSingleOrArray)]); diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 0039a942836e..836ef445e48f 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -467,9 +467,11 @@ __metadata: "@aztec/foundation": "workspace:^" "@aztec/kv-store": "workspace:^" "@aztec/noir-contracts.js": "workspace:^" + "@aztec/noir-noirc_abi": "workspace:^" "@aztec/pxe": "workspace:^" "@aztec/stdlib": "workspace:^" "@jest/globals": "npm:^29.5.0" + "@msgpack/msgpack": "npm:^3.0.0-beta2" "@types/jest": "npm:^29.5.0" "@types/node": "npm:^18.7.23" "@types/source-map-support": "npm:^0.5.10" @@ -583,6 +585,7 @@ __metadata: "@aztec/l1-artifacts": "workspace:^" "@aztec/merkle-tree": "workspace:^" "@aztec/noir-contracts.js": "workspace:^" + "@aztec/noir-noirc_abi": "workspace:^" "@aztec/noir-protocol-circuits-types": "workspace:^" "@aztec/p2p": "workspace:^" "@aztec/protocol-contracts": "workspace:^" @@ -597,6 +600,7 @@ __metadata: "@aztec/world-state": "workspace:^" "@iarna/toml": "npm:^2.2.5" "@jest/globals": "npm:^29.5.0" + "@msgpack/msgpack": 
"npm:^3.0.0-beta2" "@noble/curves": "npm:^1.0.0" "@swc/core": "npm:^1.4.11" "@swc/jest": "npm:^0.2.36"