diff --git a/.github/ci3.sh b/.github/ci3.sh index b06e1930222c..56ac9971dfa2 100755 --- a/.github/ci3.sh +++ b/.github/ci3.sh @@ -10,7 +10,7 @@ set -euo pipefail CI_MODE="${1:?CI_MODE must be provided as first argument}" shift -NO_CD=1 source $(git rev-parse --show-toplevel)/ci3/source +NO_CD=1 source $(git rev-parse --show-toplevel)/ci3/source_base function setup_environment { echo_header "Setup" diff --git a/.github/ci3_labels_to_env.sh b/.github/ci3_labels_to_env.sh index 10bfda26c4ff..ab8c3347f0f1 100755 --- a/.github/ci3_labels_to_env.sh +++ b/.github/ci3_labels_to_env.sh @@ -50,8 +50,8 @@ function main { if [ -n "$pr_number" ]; then local head_branch head_branch=$(GH_TOKEN="$GITHUB_TOKEN" gh pr view "$pr_number" --json headRefName -q '.headRefName' 2>/dev/null || true) - if [ "$head_branch" == "merge-train/spartan" ]; then - echo "Merge-train/spartan PR detected, using merge-queue-heavy mode" >&2 + if [ "$head_branch" == "merge-train/spartan" ] || [ "$head_branch" == "merge-train/ci" ]; then + echo "Merge-train PR ($head_branch) detected, using merge-queue-heavy mode" >&2 ci_mode="merge-queue-heavy" fi fi diff --git a/.github/local_workflow.sh b/.github/local_workflow.sh index ac32b623a4fe..bf37b09fafe5 100755 --- a/.github/local_workflow.sh +++ b/.github/local_workflow.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Runs a github workflow locally. 
# diff --git a/.github/workflows/merge-train-create-pr.yml b/.github/workflows/merge-train-create-pr.yml index a596af51c55e..03d6f8f8d6b9 100644 --- a/.github/workflows/merge-train-create-pr.yml +++ b/.github/workflows/merge-train-create-pr.yml @@ -47,7 +47,7 @@ jobs: # Create PR with ci-no-squash label labels="ci-no-squash" - if [[ "$branch" == "merge-train/spartan" ]]; then + if [[ "$branch" == "merge-train/spartan" || "$branch" == "merge-train/ci" ]]; then labels="$labels,ci-full-no-test-cache" fi gh pr create --base "$base_branch" --head "$branch" \ diff --git a/Makefile b/Makefile index 4a33c3687041..c366659a0f5a 100644 --- a/Makefile +++ b/Makefile @@ -188,7 +188,7 @@ bb-docs: $(call build,$@,barretenberg/docs) # BB Solidity - Solidity verifier contracts -bb-sol: bb-cpp-native +bb-sol: bb-cpp-native bb-crs $(call build,$@,barretenberg/sol) #============================================================================== diff --git a/aztec-up/bootstrap.sh b/aztec-up/bootstrap.sh index 250483410c15..9e233b810ce2 100755 --- a/aztec-up/bootstrap.sh +++ b/aztec-up/bootstrap.sh @@ -4,6 +4,12 @@ source $(git rev-parse --show-toplevel)/ci3/source_bootstrap hash=$(hash_str $(cache_content_hash ^aztec-up/) $(../yarn-project/bootstrap.sh hash)) function build { + # Noop if user doesn't have docker. + if ! command -v docker &>/dev/null; then + echo "Docker not installed. Skipping..." + return + fi + # Create versions.json so we know what to install. ../bootstrap.sh versions > ./bin/0.0.1/versions echo "Versions:" @@ -147,8 +153,9 @@ function prep_test_mac { fi # Cleanup background processes on exit. - local pids=() - trap 'kill "${pids[@]}" &>/dev/null || true' EXIT + # Note: can't use local - the trap fires after function scope is gone. + _bg_pids=() + trap 'kill "${_bg_pids[@]}" &>/dev/null || true' EXIT # Start Verdaccio in offline mode (no uplinks), bound to all interfaces. cat > /tmp/verdaccio-mac-test.yaml </dev/null & - pids+=($!) + _bg_pids+=($!) while ! 
nc -z localhost $verdaccio_port &>/dev/null; do sleep 1; done echo "Verdaccio running on 0.0.0.0:$verdaccio_port" # Serve bin/ directory over HTTP to mimic S3-hosted install scripts. python3 -m http.server $http_port --directory ./bin --bind 0.0.0.0 &>/dev/null & - pids+=($!) + _bg_pids+=($!) while ! nc -z localhost $http_port &>/dev/null; do sleep 1; done echo "HTTP server running on 0.0.0.0:$http_port (serving ./bin/)" } @@ -235,13 +242,20 @@ export -f install_on_mac_vm launch_and_install_on_mac_vm # Assumes a macos vm is already running. # Starts services, and runs install script on the mac vm via ssh. -function test_mac { - echo_header "aztec-up test_mac" +function test_on_mac_vm { local mac_name="${1:?Mac vm name (e.g. 14)}" + echo_header "aztec-up test_on_mac_vm" prep_test_mac install_on_mac_vm $mac_name } +function test_mac { + local mac_name="${1:?Mac vm name (e.g. 14)}" + echo_header "aztec-up test_mac" + prep_test_mac + launch_and_install_on_mac_vm $mac_name +} + # Starts services, launches a mac vm for each version and runs install script via ssh. function test_macs { echo_header "aztec-up test_macs" diff --git a/barretenberg/cpp/CMakePresets.json b/barretenberg/cpp/CMakePresets.json index d54ff10196f5..b38e201cdcd1 100644 --- a/barretenberg/cpp/CMakePresets.json +++ b/barretenberg/cpp/CMakePresets.json @@ -35,13 +35,13 @@ }, { "name": "clang20", - "displayName": "Build with Zig (glibc 2.35)", - "description": "Build with Zig-wrapped Clang targeting glibc 2.35 (Ubuntu 22.04+)", + "displayName": "Build with Zig (glibc 2.35 on Linux)", + "description": "Build with Zig-wrapped Clang. On Linux, pins glibc 2.35 (Ubuntu 22.04+). 
On macOS, uses native target.", "inherits": "default", "binaryDir": "build", "environment": { - "CC": "zig cc -target native-linux-gnu.2.35", - "CXX": "zig c++ -target native-linux-gnu.2.35" + "CC": "${sourceDir}/scripts/zig-cc.sh", + "CXX": "${sourceDir}/scripts/zig-c++.sh" }, "cacheVariables": { "CMAKE_AR": "${sourceDir}/scripts/zig-ar.sh", diff --git a/barretenberg/cpp/bootstrap.sh b/barretenberg/cpp/bootstrap.sh index b2c78fd1eba2..1b624054f273 100755 --- a/barretenberg/cpp/bootstrap.sh +++ b/barretenberg/cpp/bootstrap.sh @@ -35,6 +35,13 @@ function inject_version { # Version starts immediately after the sentinel local version_offset=$((sentinel_offset + ${#sentinel})) printf "$version\0" | dd of="$binary" bs=1 seek=$version_offset conv=notrunc 2>/dev/null + + # Re-sign after modifying the binary. + if [[ "$(os)" == "macos" ]]; then + codesign -s - -f "$binary" 2>/dev/null || true + elif llvm-objdump --macho --private-header "$binary" &>/dev/null; then + ldid -S "$binary" + fi } # Define build commands for each preset @@ -55,6 +62,7 @@ function build_preset() { function build_native_objects { set -eu if ! cache_exists barretenberg-$native_preset-$hash.zst; then + (flock -x 200 && cd src/barretenberg/nodejs_module && yarn --immutable) 200>/tmp/bb-yarn.lock cmake --preset "$native_preset" targets=$(cmake --build --preset "$native_preset" --target help | awk -F: '$1 ~ /(_objects|_tests|_bench|_gen|.a)$/ && $1 !~ /^cmake_/{print $1}' | tr '\n' ' ') cmake --build --preset "$native_preset" --target $targets nodejs_module @@ -87,6 +95,7 @@ function build_cross_objects { set -eu target=$1 if ! cache_exists barretenberg-$target-$hash.zst; then + (flock -x 200 && cd src/barretenberg/nodejs_module && yarn --immutable) 200>/tmp/bb-yarn.lock build_preset zig-$target --target barretenberg nodejs_module vm2_stub circuit_checker honk fi } @@ -96,17 +105,13 @@ function build_cross_objects { function build_cross { set -eu target=$1 - is_macos=${2:-false} if ! 
cache_download barretenberg-$target-$hash.zst; then + (flock -x 200 && cd src/barretenberg/nodejs_module && yarn --immutable) 200>/tmp/bb-yarn.lock build_preset zig-$target --target bb --target nodejs_module --target bb-external cache_upload barretenberg-$target-$hash.zst build-zig-$target/{bin,lib} fi # Always inject version (even for cached binaries) to ensure correct version on release inject_version build-zig-$target/bin/bb - # Code sign for macOS after version injection (must be last modification to binary) - if [ "$is_macos" == "true" ]; then - ldid -S build-zig-$target/bin/bb - fi } # Build static library (.a) for iOS using Zig cross-compilation from Linux. @@ -294,8 +299,6 @@ function build { rm -rf build* fi - (cd src/barretenberg/nodejs_module && yarn --frozen-lockfile --prefer-offline) - if semver check "$REF_NAME" && [[ "$(arch)" == "amd64" ]]; then # Download mobile SDKs before parallel builds (shared across presets) bash scripts/download-ios-sdk.sh @@ -306,8 +309,8 @@ function build { "build_wasm" \ "build_wasm_threads" \ "build_cross arm64-linux" \ - "build_cross amd64-macos true" \ - "build_cross arm64-macos true" \ + "build_cross amd64-macos" \ + "build_cross arm64-macos" \ "build_ios zig-arm64-ios" \ "build_ios zig-arm64-ios-sim" \ "build_android zig-arm64-android" \ @@ -325,7 +328,7 @@ function build { if [ "$(arch)" == "amd64" ] && [ "$CI_FULL" -eq 1 ]; then bash scripts/download-ios-sdk.sh bash scripts/download-android-sysroot.sh - builds+=("build_cross arm64-macos true" build_smt_verification "build_ios zig-arm64-ios" "build_ios zig-arm64-ios-sim" "build_android zig-arm64-android" "build_android zig-x86_64-android") + builds+=("build_cross arm64-macos" build_smt_verification "build_ios zig-arm64-ios" "build_ios zig-arm64-ios-sim" "build_android zig-arm64-android" "build_android zig-x86_64-android") fi parallel --line-buffered --tag --halt now,fail=1 "denoise {}" ::: "${builds[@]}" fi diff --git a/barretenberg/cpp/cmake/avm-transpiler.cmake 
b/barretenberg/cpp/cmake/avm-transpiler.cmake index 574827aa39bd..6e4546ee9a6f 100644 --- a/barretenberg/cpp/cmake/avm-transpiler.cmake +++ b/barretenberg/cpp/cmake/avm-transpiler.cmake @@ -20,3 +20,7 @@ add_definitions(-DENABLE_AVM_TRANSPILER) message(STATUS "avm-transpiler library: ${AVM_TRANSPILER_LIB}") message(STATUS "avm-transpiler include: ${AVM_TRANSPILER_INCLUDE}") + +if(APPLE AND NOT CMAKE_CROSSCOMPILING) + target_link_libraries(avm_transpiler INTERFACE "-framework CoreFoundation") +endif() diff --git a/barretenberg/cpp/cmake/module.cmake b/barretenberg/cpp/cmake/module.cmake index e70bb3aa4515..804715be86ec 100644 --- a/barretenberg/cpp/cmake/module.cmake +++ b/barretenberg/cpp/cmake/module.cmake @@ -204,7 +204,7 @@ function(barretenberg_module_with_sources MODULE_NAME) endif() if(NOT WASM) # Currently haven't found a way to easily wrap the calls in wasmtime when run from ctest. - gtest_discover_tests(${MODULE_NAME}_tests WORKING_DIRECTORY ${CMAKE_BINARY_DIR} TEST_FILTER -*_SKIP_CI*) + gtest_discover_tests(${MODULE_NAME}_tests WORKING_DIRECTORY ${CMAKE_BINARY_DIR} TEST_FILTER -*_SKIP_CI* DISCOVERY_TIMEOUT 30) endif() endif() diff --git a/barretenberg/cpp/scripts/audit/generate_audit_status_headers.sh b/barretenberg/cpp/scripts/audit/generate_audit_status_headers.sh index f58bbb23f1e1..eea493afcfca 100755 --- a/barretenberg/cpp/scripts/audit/generate_audit_status_headers.sh +++ b/barretenberg/cpp/scripts/audit/generate_audit_status_headers.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # --- Setup Paths -------------------------------------------------------------- diff --git a/barretenberg/cpp/scripts/audit/run_dashboard_server.sh b/barretenberg/cpp/scripts/audit/run_dashboard_server.sh index 09a69bea434e..77283430b07e 100755 --- a/barretenberg/cpp/scripts/audit/run_dashboard_server.sh +++ b/barretenberg/cpp/scripts/audit/run_dashboard_server.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e # Resolve script directory diff --git 
a/barretenberg/cpp/scripts/bench_cpu_scaling_local.sh b/barretenberg/cpp/scripts/bench_cpu_scaling_local.sh index 1bdd7326f1b5..bdeeac0a8866 100755 --- a/barretenberg/cpp/scripts/bench_cpu_scaling_local.sh +++ b/barretenberg/cpp/scripts/bench_cpu_scaling_local.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # CPU scaling benchmark that runs benchmarks locally # This script runs a command multiple times with different HARDWARE_CONCURRENCY values @@ -70,12 +70,12 @@ extract_bench_time() { # Extract time from JSON file using grep and sed # JSON format is: {"benchmark_name": time_in_nanoseconds, ...} local time_ns="" - + if [ -f "$json_file" ]; then # Extract the value for the specific benchmark name from JSON time_ns=$(grep -oP "\"${bench_name//\\/\\\\}\":\s*\K\d+" "$json_file" 2>/dev/null | head -1) fi - + # If JSON extraction failed, try to extract from log file (fallback) if [ -z "$time_ns" ] && [ -f "${json_file%/bench.json}/output.log" ]; then local log_file="${json_file%/bench.json}/output.log" @@ -123,7 +123,7 @@ for cpu_count in "${CPU_COUNTS[@]}"; do # Execute the command locally with HARDWARE_CONCURRENCY environment variable # Add --bench_out flag to get JSON output HARDWARE_CONCURRENCY=$cpu_count eval "$COMMAND --bench_out $bench_json_file" 2>&1 | tee "$log_file" - + end_time=$(date +%s.%N) wall_time=$(awk -v e="$end_time" -v s="$start_time" 'BEGIN{printf "%.2f", e-s}') @@ -133,14 +133,14 @@ for cpu_count in "${CPU_COUNTS[@]}"; do if [ -z "$bench_time_ns" ] || [ "$bench_time_ns" = "0" ]; then echo -e "${RED}Warning: Could not extract timing for '$BENCH_NAME' from JSON${NC}" echo -e "${YELLOW}Check the JSON file: $bench_json_file${NC}" - + # Show what's in the JSON file for debugging if [ -f "$bench_json_file" ]; then echo -e "${YELLOW}JSON content (first 500 chars):${NC}" head -c 500 "$bench_json_file" echo "" fi - + echo "CPUs: $cpu_count - No timing data found" >> "$RESULTS_FILE" continue fi @@ -278,4 +278,4 @@ if [ "${#ALL_SPEEDUPS[@]}" -gt 1 ]; 
then fi fi -echo "" \ No newline at end of file +echo "" diff --git a/barretenberg/cpp/scripts/bench_cpu_scaling_remote.sh b/barretenberg/cpp/scripts/bench_cpu_scaling_remote.sh index a76e95ebc4fc..84baaf1fe922 100755 --- a/barretenberg/cpp/scripts/bench_cpu_scaling_remote.sh +++ b/barretenberg/cpp/scripts/bench_cpu_scaling_remote.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # CPU scaling benchmark wrapper that uses benchmark_remote.sh properly # This script runs a command multiple times with different HARDWARE_CONCURRENCY values @@ -81,12 +81,12 @@ extract_bench_time() { # Extract time from JSON file using grep and sed # JSON format is: {"benchmark_name": time_in_nanoseconds, ...} local time_ns="" - + if [ -f "$json_file" ]; then # Extract the value for the specific benchmark name from JSON time_ns=$(grep -oP "\"${bench_name//\\/\\\\}\":\s*\K\d+" "$json_file" 2>/dev/null | head -1) fi - + # If JSON extraction failed, try to extract from log file (fallback) if [ -z "$time_ns" ] && [ -f "${json_file%/bench.json}/output.log" ]; then local log_file="${json_file%/bench.json}/output.log" @@ -135,10 +135,10 @@ for cpu_count in "${CPU_COUNTS[@]}"; do # Use tee to show output in real-time AND save to log file bench_json_file="$run_dir/bench.json" ./scripts/benchmark_remote.sh bb "HARDWARE_CONCURRENCY=$cpu_count $COMMAND --bench_out /tmp/bench_${cpu_count}.json" 2>&1 | tee "$log_file" - + # Retrieve the JSON file from remote ssh $BB_SSH_KEY $BB_SSH_INSTANCE "cat /tmp/bench_${cpu_count}.json" > "$bench_json_file" 2>/dev/null - + # Clean up the remote benchmark file after retrieval ssh $BB_SSH_KEY $BB_SSH_INSTANCE "rm -f /tmp/bench_${cpu_count}.json" 2>/dev/null @@ -151,14 +151,14 @@ for cpu_count in "${CPU_COUNTS[@]}"; do if [ -z "$bench_time_ns" ] || [ "$bench_time_ns" = "0" ]; then echo -e "${RED}Warning: Could not extract timing for '$BENCH_NAME' from JSON${NC}" echo -e "${YELLOW}Check the JSON file: $bench_json_file${NC}" - + # Show what's in the JSON file for 
debugging if [ -f "$bench_json_file" ]; then echo -e "${YELLOW}JSON content (first 500 chars):${NC}" head -c 500 "$bench_json_file" echo "" fi - + echo "CPUs: $cpu_count - No timing data found" >> "$RESULTS_FILE" continue fi diff --git a/barretenberg/cpp/scripts/test_chonk_standalone_vks_havent_changed.sh b/barretenberg/cpp/scripts/test_chonk_standalone_vks_havent_changed.sh index e1b8afc01b56..290839e6cfcf 100755 --- a/barretenberg/cpp/scripts/test_chonk_standalone_vks_havent_changed.sh +++ b/barretenberg/cpp/scripts/test_chonk_standalone_vks_havent_changed.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash source $(git rev-parse --show-toplevel)/ci3/source # export bb as it is needed when using exported functions diff --git a/barretenberg/cpp/scripts/update_acir.sh b/barretenberg/cpp/scripts/update_acir.sh index 2f768ffa8e19..aa3d56bb3f50 100755 --- a/barretenberg/cpp/scripts/update_acir.sh +++ b/barretenberg/cpp/scripts/update_acir.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash source $(git rev-parse --show-toplevel)/ci3/source # This script replaces std::array with std::shared_ptr> diff --git a/barretenberg/cpp/scripts/zig-ar.sh b/barretenberg/cpp/scripts/zig-ar.sh index 9bb97ee0d2c4..f5dbcae843cf 100755 --- a/barretenberg/cpp/scripts/zig-ar.sh +++ b/barretenberg/cpp/scripts/zig-ar.sh @@ -1,2 +1,2 @@ -#!/bin/bash +#!/usr/bin/env bash exec zig ar "$@" diff --git a/barretenberg/cpp/scripts/zig-c++.sh b/barretenberg/cpp/scripts/zig-c++.sh new file mode 100755 index 000000000000..3c1a69cb9ad6 --- /dev/null +++ b/barretenberg/cpp/scripts/zig-c++.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash +# Wrapper for zig c++ that pins glibc 2.35 on Linux (Ubuntu 22.04+ compat) +# and uses native target on macOS.
+if [[ "$(uname -s)" == "Linux" ]]; then + exec zig c++ -target native-linux-gnu.2.35 "$@" +else + exec zig c++ "$@" +fi diff --git a/barretenberg/cpp/scripts/zig-cc.sh b/barretenberg/cpp/scripts/zig-cc.sh new file mode 100755 index 000000000000..6f1444434676 --- /dev/null +++ b/barretenberg/cpp/scripts/zig-cc.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash +# Wrapper for zig cc that pins glibc 2.35 on Linux (Ubuntu 22.04+ compat) +# and uses native target on macOS. +if [[ "$(uname -s)" == "Linux" ]]; then + exec zig cc -target native-linux-gnu.2.35 "$@" +else + exec zig cc "$@" +fi diff --git a/barretenberg/cpp/scripts/zig-ranlib.sh b/barretenberg/cpp/scripts/zig-ranlib.sh index 774f6523dd58..ee4f1852f25f 100755 --- a/barretenberg/cpp/scripts/zig-ranlib.sh +++ b/barretenberg/cpp/scripts/zig-ranlib.sh @@ -1,2 +1,2 @@ -#!/bin/bash +#!/usr/bin/env bash exec zig ranlib "$@" diff --git a/barretenberg/cpp/src/barretenberg/avm_fuzzer/run_fuzzer.sh b/barretenberg/cpp/src/barretenberg/avm_fuzzer/run_fuzzer.sh index f1d989bfedfe..0c0f6cd4dc11 100755 --- a/barretenberg/cpp/src/barretenberg/avm_fuzzer/run_fuzzer.sh +++ b/barretenberg/cpp/src/barretenberg/avm_fuzzer/run_fuzzer.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Script to run AVM fuzzers with default parameters # Usage: ./run_fuzzer.sh [options] diff --git a/barretenberg/cpp/src/barretenberg/bb/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/bb/CMakeLists.txt index c3858e2dded9..8557dc52038b 100644 --- a/barretenberg/cpp/src/barretenberg/bb/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/bb/CMakeLists.txt @@ -20,7 +20,7 @@ if (NOT(FUZZING)) ) # Link avm_transpiler when library is provided if(AVM_TRANSPILER_LIB) - target_link_libraries(bb PRIVATE ${AVM_TRANSPILER_LIB}) + target_link_libraries(bb PRIVATE avm_transpiler) endif() if(NOT WASM) target_link_libraries(bb PRIVATE ipc) @@ -60,7 +60,7 @@ if (NOT(FUZZING)) ) # Link avm_transpiler when library is provided if(AVM_TRANSPILER_LIB) - target_link_libraries(bb-avm 
PRIVATE ${AVM_TRANSPILER_LIB}) + target_link_libraries(bb-avm PRIVATE avm_transpiler) endif() if(NOT WASM) target_link_libraries(bb-avm PRIVATE ipc) diff --git a/barretenberg/cpp/src/barretenberg/nodejs_module/.gitignore b/barretenberg/cpp/src/barretenberg/nodejs_module/.gitignore new file mode 100644 index 000000000000..61c3bc75a05e --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/nodejs_module/.gitignore @@ -0,0 +1 @@ +.yarn diff --git a/barretenberg/cpp/src/barretenberg/nodejs_module/.yarnrc.yml b/barretenberg/cpp/src/barretenberg/nodejs_module/.yarnrc.yml new file mode 100644 index 000000000000..3186f3f0795a --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/nodejs_module/.yarnrc.yml @@ -0,0 +1 @@ +nodeLinker: node-modules diff --git a/barretenberg/cpp/src/barretenberg/nodejs_module/package.json b/barretenberg/cpp/src/barretenberg/nodejs_module/package.json index 594797b5660e..f2365b59a499 100644 --- a/barretenberg/cpp/src/barretenberg/nodejs_module/package.json +++ b/barretenberg/cpp/src/barretenberg/nodejs_module/package.json @@ -2,7 +2,7 @@ "name": "nodejs_module", "private": true, "version": "0.0.0", - "packageManager": "yarn@1.22.22+sha512.a6b2f7906b721bba3d67d4aff083df04dad64c399707841b7acf00f6b133b7ac24255f2652fa22ae3534329dc6180534e98d17432037ff6fd140556e2bb3137e", + "packageManager": "yarn@4.5.2", "dependencies": { "node-addon-api": "^8.0.0", "node-api-headers": "^1.1.0" diff --git a/barretenberg/cpp/src/barretenberg/nodejs_module/yarn.lock b/barretenberg/cpp/src/barretenberg/nodejs_module/yarn.lock index 6a671ec7eece..68a1c99d8baf 100644 --- a/barretenberg/cpp/src/barretenberg/nodejs_module/yarn.lock +++ b/barretenberg/cpp/src/barretenberg/nodejs_module/yarn.lock @@ -1,13 +1,579 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. -# yarn lockfile v1 +# This file is generated by running "yarn install" inside your project. +# Manual changes might be lost - proceed with caution! 
+__metadata: + version: 8 + cacheKey: 10c0 -node-addon-api@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-8.0.0.tgz#5453b7ad59dd040d12e0f1a97a6fa1c765c5c9d2" - integrity sha512-ipO7rsHEBqa9STO5C5T10fj732ml+5kLN1cAG8/jdHd56ldQeGj3Q7+scUS+VHK/qy1zLEwC4wMK5+yM0btPvw== +"@isaacs/balanced-match@npm:^4.0.1": + version: 4.0.1 + resolution: "@isaacs/balanced-match@npm:4.0.1" + checksum: 10c0/7da011805b259ec5c955f01cee903da72ad97c5e6f01ca96197267d3f33103d5b2f8a1af192140f3aa64526c593c8d098ae366c2b11f7f17645d12387c2fd420 + languageName: node + linkType: hard -node-api-headers@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/node-api-headers/-/node-api-headers-1.1.0.tgz#3f9dd7bb10b29e1c3e3db675979605a308b2373c" - integrity sha512-ucQW+SbYCUPfprvmzBsnjT034IGRB2XK8rRc78BgjNKhTdFKgAwAmgW704bKIBmcYW48it0Gkjpkd39Azrwquw== +"@isaacs/brace-expansion@npm:^5.0.1": + version: 5.0.1 + resolution: "@isaacs/brace-expansion@npm:5.0.1" + dependencies: + "@isaacs/balanced-match": "npm:^4.0.1" + checksum: 10c0/e5d67c7bbf1f17b88132a35bc638af306d48acbb72810d48fa6e6edd8ab375854773108e8bf70f021f7ef6a8273455a6d1f0c3b5aa2aff06ce7894049ab77fb8 + languageName: node + linkType: hard + +"@isaacs/fs-minipass@npm:^4.0.0": + version: 4.0.1 + resolution: "@isaacs/fs-minipass@npm:4.0.1" + dependencies: + minipass: "npm:^7.0.4" + checksum: 10c0/c25b6dc1598790d5b55c0947a9b7d111cfa92594db5296c3b907e2f533c033666f692a3939eadac17b1c7c40d362d0b0635dc874cbfe3e70db7c2b07cc97a5d2 + languageName: node + linkType: hard + +"@npmcli/agent@npm:^4.0.0": + version: 4.0.0 + resolution: "@npmcli/agent@npm:4.0.0" + dependencies: + agent-base: "npm:^7.1.0" + http-proxy-agent: "npm:^7.0.0" + https-proxy-agent: "npm:^7.0.1" + lru-cache: "npm:^11.2.1" + socks-proxy-agent: "npm:^8.0.3" + checksum: 10c0/f7b5ce0f3dd42c3f8c6546e8433573d8049f67ef11ec22aa4704bc41483122f68bf97752e06302c455ead667af5cb753e6a09bff06632bc465c1cfd4c4b75a53 + languageName: node + linkType: 
hard + +"@npmcli/fs@npm:^5.0.0": + version: 5.0.0 + resolution: "@npmcli/fs@npm:5.0.0" + dependencies: + semver: "npm:^7.3.5" + checksum: 10c0/26e376d780f60ff16e874a0ac9bc3399186846baae0b6e1352286385ac134d900cc5dafaded77f38d77f86898fc923ae1cee9d7399f0275b1aa24878915d722b + languageName: node + linkType: hard + +"abbrev@npm:^4.0.0": + version: 4.0.0 + resolution: "abbrev@npm:4.0.0" + checksum: 10c0/b4cc16935235e80702fc90192e349e32f8ef0ed151ef506aa78c81a7c455ec18375c4125414b99f84b2e055199d66383e787675f0bcd87da7a4dbd59f9eac1d5 + languageName: node + linkType: hard + +"agent-base@npm:^7.1.0, agent-base@npm:^7.1.2": + version: 7.1.4 + resolution: "agent-base@npm:7.1.4" + checksum: 10c0/c2c9ab7599692d594b6a161559ada307b7a624fa4c7b03e3afdb5a5e31cd0e53269115b620fcab024c5ac6a6f37fa5eb2e004f076ad30f5f7e6b8b671f7b35fe + languageName: node + linkType: hard + +"cacache@npm:^20.0.1": + version: 20.0.3 + resolution: "cacache@npm:20.0.3" + dependencies: + "@npmcli/fs": "npm:^5.0.0" + fs-minipass: "npm:^3.0.0" + glob: "npm:^13.0.0" + lru-cache: "npm:^11.1.0" + minipass: "npm:^7.0.3" + minipass-collect: "npm:^2.0.1" + minipass-flush: "npm:^1.0.5" + minipass-pipeline: "npm:^1.2.4" + p-map: "npm:^7.0.2" + ssri: "npm:^13.0.0" + unique-filename: "npm:^5.0.0" + checksum: 10c0/c7da1ca694d20e8f8aedabd21dc11518f809a7d2b59aa76a1fc655db5a9e62379e465c157ddd2afe34b19230808882288effa6911b2de26a088a6d5645123462 + languageName: node + linkType: hard + +"chownr@npm:^3.0.0": + version: 3.0.0 + resolution: "chownr@npm:3.0.0" + checksum: 10c0/43925b87700f7e3893296c8e9c56cc58f926411cce3a6e5898136daaf08f08b9a8eb76d37d3267e707d0dcc17aed2e2ebdf5848c0c3ce95cf910a919935c1b10 + languageName: node + linkType: hard + +"debug@npm:4, debug@npm:^4.3.4": + version: 4.4.3 + resolution: "debug@npm:4.4.3" + dependencies: + ms: "npm:^2.1.3" + peerDependenciesMeta: + supports-color: + optional: true + checksum: 
10c0/d79136ec6c83ecbefd0f6a5593da6a9c91ec4d7ddc4b54c883d6e71ec9accb5f67a1a5e96d00a328196b5b5c86d365e98d8a3a70856aaf16b4e7b1985e67f5a6 + languageName: node + linkType: hard + +"encoding@npm:^0.1.13": + version: 0.1.13 + resolution: "encoding@npm:0.1.13" + dependencies: + iconv-lite: "npm:^0.6.2" + checksum: 10c0/36d938712ff00fe1f4bac88b43bcffb5930c1efa57bbcdca9d67e1d9d6c57cfb1200fb01efe0f3109b2ce99b231f90779532814a81370a1bd3274a0f58585039 + languageName: node + linkType: hard + +"env-paths@npm:^2.2.0": + version: 2.2.1 + resolution: "env-paths@npm:2.2.1" + checksum: 10c0/285325677bf00e30845e330eec32894f5105529db97496ee3f598478e50f008c5352a41a30e5e72ec9de8a542b5a570b85699cd63bd2bc646dbcb9f311d83bc4 + languageName: node + linkType: hard + +"err-code@npm:^2.0.2": + version: 2.0.3 + resolution: "err-code@npm:2.0.3" + checksum: 10c0/b642f7b4dd4a376e954947550a3065a9ece6733ab8e51ad80db727aaae0817c2e99b02a97a3d6cecc648a97848305e728289cf312d09af395403a90c9d4d8a66 + languageName: node + linkType: hard + +"exponential-backoff@npm:^3.1.1": + version: 3.1.3 + resolution: "exponential-backoff@npm:3.1.3" + checksum: 10c0/77e3ae682b7b1f4972f563c6dbcd2b0d54ac679e62d5d32f3e5085feba20483cf28bd505543f520e287a56d4d55a28d7874299941faf637e779a1aa5994d1267 + languageName: node + linkType: hard + +"fdir@npm:^6.5.0": + version: 6.5.0 + resolution: "fdir@npm:6.5.0" + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + checksum: 10c0/e345083c4306b3aed6cb8ec551e26c36bab5c511e99ea4576a16750ddc8d3240e63826cc624f5ae17ad4dc82e68a253213b60d556c11bfad064b7607847ed07f + languageName: node + linkType: hard + +"fs-minipass@npm:^3.0.0": + version: 3.0.3 + resolution: "fs-minipass@npm:3.0.3" + dependencies: + minipass: "npm:^7.0.3" + checksum: 10c0/63e80da2ff9b621e2cb1596abcb9207f1cf82b968b116ccd7b959e3323144cce7fb141462200971c38bbf2ecca51695069db45265705bed09a7cd93ae5b89f94 + languageName: node + linkType: hard + +"glob@npm:^13.0.0": + version: 13.0.2 + 
resolution: "glob@npm:13.0.2" + dependencies: + minimatch: "npm:^10.1.2" + minipass: "npm:^7.1.2" + path-scurry: "npm:^2.0.0" + checksum: 10c0/3d4b09efa922c4cba9be6d5b9efae14384e7422aeb886eb35fba8a94820b8281474b8d3f16927127fb1a0c8580e18fc00e3fda03c8dc31fa0af3ba918edeeb04 + languageName: node + linkType: hard + +"graceful-fs@npm:^4.2.6": + version: 4.2.11 + resolution: "graceful-fs@npm:4.2.11" + checksum: 10c0/386d011a553e02bc594ac2ca0bd6d9e4c22d7fa8cfbfc448a6d148c59ea881b092db9dbe3547ae4b88e55f1b01f7c4a2ecc53b310c042793e63aa44cf6c257f2 + languageName: node + linkType: hard + +"http-cache-semantics@npm:^4.1.1": + version: 4.2.0 + resolution: "http-cache-semantics@npm:4.2.0" + checksum: 10c0/45b66a945cf13ec2d1f29432277201313babf4a01d9e52f44b31ca923434083afeca03f18417f599c9ab3d0e7b618ceb21257542338b57c54b710463b4a53e37 + languageName: node + linkType: hard + +"http-proxy-agent@npm:^7.0.0": + version: 7.0.2 + resolution: "http-proxy-agent@npm:7.0.2" + dependencies: + agent-base: "npm:^7.1.0" + debug: "npm:^4.3.4" + checksum: 10c0/4207b06a4580fb85dd6dff521f0abf6db517489e70863dca1a0291daa7f2d3d2d6015a57bd702af068ea5cf9f1f6ff72314f5f5b4228d299c0904135d2aef921 + languageName: node + linkType: hard + +"https-proxy-agent@npm:^7.0.1": + version: 7.0.6 + resolution: "https-proxy-agent@npm:7.0.6" + dependencies: + agent-base: "npm:^7.1.2" + debug: "npm:4" + checksum: 10c0/f729219bc735edb621fa30e6e84e60ee5d00802b8247aac0d7b79b0bd6d4b3294737a337b93b86a0bd9e68099d031858a39260c976dc14cdbba238ba1f8779ac + languageName: node + linkType: hard + +"iconv-lite@npm:^0.6.2": + version: 0.6.3 + resolution: "iconv-lite@npm:0.6.3" + dependencies: + safer-buffer: "npm:>= 2.1.2 < 3.0.0" + checksum: 10c0/98102bc66b33fcf5ac044099d1257ba0b7ad5e3ccd3221f34dd508ab4070edff183276221684e1e0555b145fce0850c9f7d2b60a9fcac50fbb4ea0d6e845a3b1 + languageName: node + linkType: hard + +"imurmurhash@npm:^0.1.4": + version: 0.1.4 + resolution: "imurmurhash@npm:0.1.4" + checksum: 
10c0/8b51313850dd33605c6c9d3fd9638b714f4c4c40250cff658209f30d40da60f78992fb2df5dabee4acf589a6a82bbc79ad5486550754bd9ec4e3fc0d4a57d6a6 + languageName: node + linkType: hard + +"ip-address@npm:^10.0.1": + version: 10.1.0 + resolution: "ip-address@npm:10.1.0" + checksum: 10c0/0103516cfa93f6433b3bd7333fa876eb21263912329bfa47010af5e16934eeeff86f3d2ae700a3744a137839ddfad62b900c7a445607884a49b5d1e32a3d7566 + languageName: node + linkType: hard + +"isexe@npm:^4.0.0": + version: 4.0.0 + resolution: "isexe@npm:4.0.0" + checksum: 10c0/5884815115bceac452877659a9c7726382531592f43dc29e5d48b7c4100661aed54018cb90bd36cb2eaeba521092570769167acbb95c18d39afdccbcca06c5ce + languageName: node + linkType: hard + +"lru-cache@npm:^11.0.0, lru-cache@npm:^11.1.0, lru-cache@npm:^11.2.1": + version: 11.2.6 + resolution: "lru-cache@npm:11.2.6" + checksum: 10c0/73bbffb298760e71b2bfe8ebc16a311c6a60ceddbba919cfedfd8635c2d125fbfb5a39b71818200e67973b11f8d59c5a9e31d6f90722e340e90393663a66e5cd + languageName: node + linkType: hard + +"make-fetch-happen@npm:^15.0.0": + version: 15.0.3 + resolution: "make-fetch-happen@npm:15.0.3" + dependencies: + "@npmcli/agent": "npm:^4.0.0" + cacache: "npm:^20.0.1" + http-cache-semantics: "npm:^4.1.1" + minipass: "npm:^7.0.2" + minipass-fetch: "npm:^5.0.0" + minipass-flush: "npm:^1.0.5" + minipass-pipeline: "npm:^1.2.4" + negotiator: "npm:^1.0.0" + proc-log: "npm:^6.0.0" + promise-retry: "npm:^2.0.1" + ssri: "npm:^13.0.0" + checksum: 10c0/525f74915660be60b616bcbd267c4a5b59481b073ba125e45c9c3a041bb1a47a2bd0ae79d028eb6f5f95bf9851a4158423f5068539c3093621abb64027e8e461 + languageName: node + linkType: hard + +"minimatch@npm:^10.1.2": + version: 10.1.2 + resolution: "minimatch@npm:10.1.2" + dependencies: + "@isaacs/brace-expansion": "npm:^5.0.1" + checksum: 10c0/0cccef3622201703de6ecf9d772c0be1d5513dcc038ed9feb866c20cf798243e678ac35605dac3f1a054650c28037486713fe9e9a34b184b9097959114daf086 + languageName: node + linkType: hard + +"minipass-collect@npm:^2.0.1": + version: 
2.0.1 + resolution: "minipass-collect@npm:2.0.1" + dependencies: + minipass: "npm:^7.0.3" + checksum: 10c0/5167e73f62bb74cc5019594709c77e6a742051a647fe9499abf03c71dca75515b7959d67a764bdc4f8b361cf897fbf25e2d9869ee039203ed45240f48b9aa06e + languageName: node + linkType: hard + +"minipass-fetch@npm:^5.0.0": + version: 5.0.1 + resolution: "minipass-fetch@npm:5.0.1" + dependencies: + encoding: "npm:^0.1.13" + minipass: "npm:^7.0.3" + minipass-sized: "npm:^2.0.0" + minizlib: "npm:^3.0.1" + dependenciesMeta: + encoding: + optional: true + checksum: 10c0/50bcf48c9841ebb25e29a2817468595219c72cfffc7c175a1d7327843c8bef9b72cb01778f46df7eca695dfe47ab98e6167af4cb026ddd80f660842919a5193c + languageName: node + linkType: hard + +"minipass-flush@npm:^1.0.5": + version: 1.0.5 + resolution: "minipass-flush@npm:1.0.5" + dependencies: + minipass: "npm:^3.0.0" + checksum: 10c0/2a51b63feb799d2bb34669205eee7c0eaf9dce01883261a5b77410c9408aa447e478efd191b4de6fc1101e796ff5892f8443ef20d9544385819093dbb32d36bd + languageName: node + linkType: hard + +"minipass-pipeline@npm:^1.2.4": + version: 1.2.4 + resolution: "minipass-pipeline@npm:1.2.4" + dependencies: + minipass: "npm:^3.0.0" + checksum: 10c0/cbda57cea20b140b797505dc2cac71581a70b3247b84480c1fed5ca5ba46c25ecc25f68bfc9e6dcb1a6e9017dab5c7ada5eab73ad4f0a49d84e35093e0c643f2 + languageName: node + linkType: hard + +"minipass-sized@npm:^2.0.0": + version: 2.0.0 + resolution: "minipass-sized@npm:2.0.0" + dependencies: + minipass: "npm:^7.1.2" + checksum: 10c0/f9201696a6f6d68610d04c9c83e3d2e5cb9c026aae1c8cbf7e17f386105cb79c1bb088dbc21bf0b1eb4f3fb5df384fd1e7aa3bf1f33868c416ae8c8a92679db8 + languageName: node + linkType: hard + +"minipass@npm:^3.0.0": + version: 3.3.6 + resolution: "minipass@npm:3.3.6" + dependencies: + yallist: "npm:^4.0.0" + checksum: 10c0/a114746943afa1dbbca8249e706d1d38b85ed1298b530f5808ce51f8e9e941962e2a5ad2e00eae7dd21d8a4aae6586a66d4216d1a259385e9d0358f0c1eba16c + languageName: node + linkType: hard + +"minipass@npm:^7.0.2, 
minipass@npm:^7.0.3, minipass@npm:^7.0.4, minipass@npm:^7.1.2": + version: 7.1.2 + resolution: "minipass@npm:7.1.2" + checksum: 10c0/b0fd20bb9fb56e5fa9a8bfac539e8915ae07430a619e4b86ff71f5fc757ef3924b23b2c4230393af1eda647ed3d75739e4e0acb250a6b1eb277cf7f8fe449557 + languageName: node + linkType: hard + +"minizlib@npm:^3.0.1, minizlib@npm:^3.1.0": + version: 3.1.0 + resolution: "minizlib@npm:3.1.0" + dependencies: + minipass: "npm:^7.1.2" + checksum: 10c0/5aad75ab0090b8266069c9aabe582c021ae53eb33c6c691054a13a45db3b4f91a7fb1bd79151e6b4e9e9a86727b522527c0a06ec7d45206b745d54cd3097bcec + languageName: node + linkType: hard + +"ms@npm:^2.1.3": + version: 2.1.3 + resolution: "ms@npm:2.1.3" + checksum: 10c0/d924b57e7312b3b63ad21fc5b3dc0af5e78d61a1fc7cfb5457edaf26326bf62be5307cc87ffb6862ef1c2b33b0233cdb5d4f01c4c958cc0d660948b65a287a48 + languageName: node + linkType: hard + +"negotiator@npm:^1.0.0": + version: 1.0.0 + resolution: "negotiator@npm:1.0.0" + checksum: 10c0/4c559dd52669ea48e1914f9d634227c561221dd54734070791f999c52ed0ff36e437b2e07d5c1f6e32909fc625fe46491c16e4a8f0572567d4dd15c3a4fda04b + languageName: node + linkType: hard + +"node-addon-api@npm:^8.0.0": + version: 8.0.0 + resolution: "node-addon-api@npm:8.0.0" + dependencies: + node-gyp: "npm:latest" + checksum: 10c0/20eb231362cc07c62d9839164473744d985be5d82685214f3750d990d9f61ef366e0ba112a766c925d640ed29b2a500b83568e895dc2444dcd5db01e615aac2b + languageName: node + linkType: hard + +"node-api-headers@npm:^1.1.0": + version: 1.1.0 + resolution: "node-api-headers@npm:1.1.0" + checksum: 10c0/7806d71077348ea199034e8c90a9147038d37fcccc1b85717e48c095fe31783a4f909f5daced4506e6cbce93fba91220bb3fc8626ee0640d26de9860f6500174 + languageName: node + linkType: hard + +"node-gyp@npm:latest": + version: 12.2.0 + resolution: "node-gyp@npm:12.2.0" + dependencies: + env-paths: "npm:^2.2.0" + exponential-backoff: "npm:^3.1.1" + graceful-fs: "npm:^4.2.6" + make-fetch-happen: "npm:^15.0.0" + nopt: "npm:^9.0.0" + proc-log: "npm:^6.0.0" 
+ semver: "npm:^7.3.5" + tar: "npm:^7.5.4" + tinyglobby: "npm:^0.2.12" + which: "npm:^6.0.0" + bin: + node-gyp: bin/node-gyp.js + checksum: 10c0/3ed046746a5a7d90950cd8b0547332b06598443f31fe213ef4332a7174c7b7d259e1704835feda79b87d3f02e59d7791842aac60642ede4396ab25fdf0f8f759 + languageName: node + linkType: hard + +"nodejs_module@workspace:.": + version: 0.0.0-use.local + resolution: "nodejs_module@workspace:." + dependencies: + node-addon-api: "npm:^8.0.0" + node-api-headers: "npm:^1.1.0" + languageName: unknown + linkType: soft + +"nopt@npm:^9.0.0": + version: 9.0.0 + resolution: "nopt@npm:9.0.0" + dependencies: + abbrev: "npm:^4.0.0" + bin: + nopt: bin/nopt.js + checksum: 10c0/1822eb6f9b020ef6f7a7516d7b64a8036e09666ea55ac40416c36e4b2b343122c3cff0e2f085675f53de1d2db99a2a89a60ccea1d120bcd6a5347bf6ceb4a7fd + languageName: node + linkType: hard + +"p-map@npm:^7.0.2": + version: 7.0.4 + resolution: "p-map@npm:7.0.4" + checksum: 10c0/a5030935d3cb2919d7e89454d1ce82141e6f9955413658b8c9403cfe379283770ed3048146b44cde168aa9e8c716505f196d5689db0ae3ce9a71521a2fef3abd + languageName: node + linkType: hard + +"path-scurry@npm:^2.0.0": + version: 2.0.1 + resolution: "path-scurry@npm:2.0.1" + dependencies: + lru-cache: "npm:^11.0.0" + minipass: "npm:^7.1.2" + checksum: 10c0/2a16ed0e81fbc43513e245aa5763354e25e787dab0d539581a6c3f0f967461a159ed6236b2559de23aa5b88e7dc32b469b6c47568833dd142a4b24b4f5cd2620 + languageName: node + linkType: hard + +"picomatch@npm:^4.0.3": + version: 4.0.3 + resolution: "picomatch@npm:4.0.3" + checksum: 10c0/9582c951e95eebee5434f59e426cddd228a7b97a0161a375aed4be244bd3fe8e3a31b846808ea14ef2c8a2527a6eeab7b3946a67d5979e81694654f939473ae2 + languageName: node + linkType: hard + +"proc-log@npm:^6.0.0": + version: 6.1.0 + resolution: "proc-log@npm:6.1.0" + checksum: 10c0/4f178d4062733ead9d71a9b1ab24ebcecdfe2250916a5b1555f04fe2eda972a0ec76fbaa8df1ad9c02707add6749219d118a4fc46dc56bdfe4dde4b47d80bb82 + languageName: node + linkType: hard + 
+"promise-retry@npm:^2.0.1": + version: 2.0.1 + resolution: "promise-retry@npm:2.0.1" + dependencies: + err-code: "npm:^2.0.2" + retry: "npm:^0.12.0" + checksum: 10c0/9c7045a1a2928094b5b9b15336dcd2a7b1c052f674550df63cc3f36cd44028e5080448175b6f6ca32b642de81150f5e7b1a98b728f15cb069f2dd60ac2616b96 + languageName: node + linkType: hard + +"retry@npm:^0.12.0": + version: 0.12.0 + resolution: "retry@npm:0.12.0" + checksum: 10c0/59933e8501727ba13ad73ef4a04d5280b3717fd650408460c987392efe9d7be2040778ed8ebe933c5cbd63da3dcc37919c141ef8af0a54a6e4fca5a2af177bfe + languageName: node + linkType: hard + +"safer-buffer@npm:>= 2.1.2 < 3.0.0": + version: 2.1.2 + resolution: "safer-buffer@npm:2.1.2" + checksum: 10c0/7e3c8b2e88a1841c9671094bbaeebd94448111dd90a81a1f606f3f67708a6ec57763b3b47f06da09fc6054193e0e6709e77325415dc8422b04497a8070fa02d4 + languageName: node + linkType: hard + +"semver@npm:^7.3.5": + version: 7.7.4 + resolution: "semver@npm:7.7.4" + bin: + semver: bin/semver.js + checksum: 10c0/5215ad0234e2845d4ea5bb9d836d42b03499546ddafb12075566899fc617f68794bb6f146076b6881d755de17d6c6cc73372555879ec7dce2c2feee947866ad2 + languageName: node + linkType: hard + +"smart-buffer@npm:^4.2.0": + version: 4.2.0 + resolution: "smart-buffer@npm:4.2.0" + checksum: 10c0/a16775323e1404dd43fabafe7460be13a471e021637bc7889468eb45ce6a6b207261f454e4e530a19500cc962c4cc5348583520843b363f4193cee5c00e1e539 + languageName: node + linkType: hard + +"socks-proxy-agent@npm:^8.0.3": + version: 8.0.5 + resolution: "socks-proxy-agent@npm:8.0.5" + dependencies: + agent-base: "npm:^7.1.2" + debug: "npm:^4.3.4" + socks: "npm:^2.8.3" + checksum: 10c0/5d2c6cecba6821389aabf18728325730504bf9bb1d9e342e7987a5d13badd7a98838cc9a55b8ed3cb866ad37cc23e1086f09c4d72d93105ce9dfe76330e9d2a6 + languageName: node + linkType: hard + +"socks@npm:^2.8.3": + version: 2.8.7 + resolution: "socks@npm:2.8.7" + dependencies: + ip-address: "npm:^10.0.1" + smart-buffer: "npm:^4.2.0" + checksum: 
10c0/2805a43a1c4bcf9ebf6e018268d87b32b32b06fbbc1f9282573583acc155860dc361500f89c73bfbb157caa1b4ac78059eac0ef15d1811eb0ca75e0bdadbc9d2 + languageName: node + linkType: hard + +"ssri@npm:^13.0.0": + version: 13.0.1 + resolution: "ssri@npm:13.0.1" + dependencies: + minipass: "npm:^7.0.3" + checksum: 10c0/cf6408a18676c57ff2ed06b8a20dc64bb3e748e5c7e095332e6aecaa2b8422b1e94a739a8453bf65156a8a47afe23757ba4ab52d3ea3b62322dc40875763e17a + languageName: node + linkType: hard + +"tar@npm:^7.5.4": + version: 7.5.7 + resolution: "tar@npm:7.5.7" + dependencies: + "@isaacs/fs-minipass": "npm:^4.0.0" + chownr: "npm:^3.0.0" + minipass: "npm:^7.1.2" + minizlib: "npm:^3.1.0" + yallist: "npm:^5.0.0" + checksum: 10c0/51f261afc437e1112c3e7919478d6176ea83f7f7727864d8c2cce10f0b03a631d1911644a567348c3063c45abdae39718ba97abb073d22aa3538b9a53ae1e31c + languageName: node + linkType: hard + +"tinyglobby@npm:^0.2.12": + version: 0.2.15 + resolution: "tinyglobby@npm:0.2.15" + dependencies: + fdir: "npm:^6.5.0" + picomatch: "npm:^4.0.3" + checksum: 10c0/869c31490d0d88eedb8305d178d4c75e7463e820df5a9b9d388291daf93e8b1eb5de1dad1c1e139767e4269fe75f3b10d5009b2cc14db96ff98986920a186844 + languageName: node + linkType: hard + +"unique-filename@npm:^5.0.0": + version: 5.0.0 + resolution: "unique-filename@npm:5.0.0" + dependencies: + unique-slug: "npm:^6.0.0" + checksum: 10c0/afb897e9cf4c2fb622ea716f7c2bb462001928fc5f437972213afdf1cc32101a230c0f1e9d96fc91ee5185eca0f2feb34127145874975f347be52eb91d6ccc2c + languageName: node + linkType: hard + +"unique-slug@npm:^6.0.0": + version: 6.0.0 + resolution: "unique-slug@npm:6.0.0" + dependencies: + imurmurhash: "npm:^0.1.4" + checksum: 10c0/da7ade4cb04eb33ad0499861f82fe95ce9c7c878b7139dc54d140ecfb6a6541c18a5c8dac16188b8b379fe62c0c1f1b710814baac910cde5f4fec06212126c6a + languageName: node + linkType: hard + +"which@npm:^6.0.0": + version: 6.0.1 + resolution: "which@npm:6.0.1" + dependencies: + isexe: "npm:^4.0.0" + bin: + node-which: bin/which.js + checksum: 
10c0/7e710e54ea36d2d6183bee2f9caa27a3b47b9baf8dee55a199b736fcf85eab3b9df7556fca3d02b50af7f3dfba5ea3a45644189836df06267df457e354da66d5 + languageName: node + linkType: hard + +"yallist@npm:^4.0.0": + version: 4.0.0 + resolution: "yallist@npm:4.0.0" + checksum: 10c0/2286b5e8dbfe22204ab66e2ef5cc9bbb1e55dfc873bbe0d568aa943eb255d131890dfd5bf243637273d31119b870f49c18fcde2c6ffbb7a7a092b870dc90625a + languageName: node + linkType: hard + +"yallist@npm:^5.0.0": + version: 5.0.0 + resolution: "yallist@npm:5.0.0" + checksum: 10c0/a499c81ce6d4a1d260d4ea0f6d49ab4da09681e32c3f0472dee16667ed69d01dae63a3b81745a24bd78476ec4fcf856114cb4896ace738e01da34b2c42235416 + languageName: node + linkType: hard diff --git a/barretenberg/cpp/src/barretenberg/vm2/tracegen/lib/lookup_into_p_decomposition.hpp b/barretenberg/cpp/src/barretenberg/vm2/tracegen/lib/lookup_into_p_decomposition.hpp index f71726e91d0e..a09e3e8defde 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/tracegen/lib/lookup_into_p_decomposition.hpp +++ b/barretenberg/cpp/src/barretenberg/vm2/tracegen/lib/lookup_into_p_decomposition.hpp @@ -24,7 +24,8 @@ template class LookupIntoPDecomposition : public Index }(); const auto& [radix, limb_index, _] = tup; - return cumulative_p_limb_index.at(static_cast(radix)) + static_cast(limb_index); + return cumulative_p_limb_index.at(static_cast(static_cast(radix))) + + static_cast(limb_index); } }; diff --git a/barretenberg/cpp/src/barretenberg/world_state/world_state.test.cpp b/barretenberg/cpp/src/barretenberg/world_state/world_state.test.cpp index b8a5a9d4d879..e28a3d4d9549 100644 --- a/barretenberg/cpp/src/barretenberg/world_state/world_state.test.cpp +++ b/barretenberg/cpp/src/barretenberg/world_state/world_state.test.cpp @@ -492,9 +492,9 @@ TEST_F(WorldStateTest, NullifierBatchInsert) auto response = ws.batch_insert_indexed_leaves( MerkleTreeId::NULLIFIER_TREE, { NullifierLeafValue(150), NullifierLeafValue(142), NullifierLeafValue(180) }, 2); - std::vector> expected_sorted_leaves = { 
{ NullifierLeafValue(180), 2 }, - { NullifierLeafValue(150), 0 }, - { NullifierLeafValue(142), 1 } }; + std::vector> expected_sorted_leaves = { { NullifierLeafValue(180), 2 }, + { NullifierLeafValue(150), 0 }, + { NullifierLeafValue(142), 1 } }; EXPECT_EQ(response.sorted_leaves, expected_sorted_leaves); { diff --git a/barretenberg/docs/scripts/build_docs.sh b/barretenberg/docs/scripts/build_docs.sh index d67f5433282d..2489ea892e4c 100755 --- a/barretenberg/docs/scripts/build_docs.sh +++ b/barretenberg/docs/scripts/build_docs.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e @@ -13,11 +13,8 @@ if ! [ -d "docs/build" ]; then exit 1 fi # First, clean the destination to avoid any leftover files -rm -rf ../docs/static/api/* - -# Copy the built documentation -mkdir -p ../docs/static/api/ -cp -R docs/build/* ../docs/static/api/ +rm -rf ../docs/static/api +cp -R docs/build ../docs/static/api # NOTE(AD): hack - but was blocked and couldn't figure out why we had two examples for something called 'if' with different casing. rm -f ../docs/static/api/if-example.html diff --git a/barretenberg/docs/scripts/cleanup_nightly_versions.sh b/barretenberg/docs/scripts/cleanup_nightly_versions.sh index 918e672c55a5..629a52d1e45d 100755 --- a/barretenberg/docs/scripts/cleanup_nightly_versions.sh +++ b/barretenberg/docs/scripts/cleanup_nightly_versions.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Script to clean up nightly documentation versions for Barretenberg # This removes all versions containing "nightly" from Barretenberg docs diff --git a/barretenberg/docs/scripts/get_current_version.sh b/barretenberg/docs/scripts/get_current_version.sh index 2a06d207c2e0..78005fe7f897 100755 --- a/barretenberg/docs/scripts/get_current_version.sh +++ b/barretenberg/docs/scripts/get_current_version.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script fetches the current version from versioned_docs directory # Returns the version number (e.g. 
"v0.85.0") diff --git a/barretenberg/docs/scripts/update_versions.sh b/barretenberg/docs/scripts/update_versions.sh index aa063242a609..bd2e85122787 100755 --- a/barretenberg/docs/scripts/update_versions.sh +++ b/barretenberg/docs/scripts/update_versions.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script updates versions.json with the latest version from versioned_docs. # It automatically detects if nightly versions exist and includes them appropriately. diff --git a/barretenberg/security/ssa_fuzzer_programs_proving/prove_and_verify.sh b/barretenberg/security/ssa_fuzzer_programs_proving/prove_and_verify.sh index a7c5a2d7088d..325810b6db1b 100755 --- a/barretenberg/security/ssa_fuzzer_programs_proving/prove_and_verify.sh +++ b/barretenberg/security/ssa_fuzzer_programs_proving/prove_and_verify.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash target_dir=$(mktemp -d) bb_executable="${BB_EXECUTABLE_PATH:-/root/.bb/bb}" diff --git a/barretenberg/security/ssa_fuzzer_programs_proving/src/prove_and_verify.sh b/barretenberg/security/ssa_fuzzer_programs_proving/src/prove_and_verify.sh index a7c5a2d7088d..325810b6db1b 100755 --- a/barretenberg/security/ssa_fuzzer_programs_proving/src/prove_and_verify.sh +++ b/barretenberg/security/ssa_fuzzer_programs_proving/src/prove_and_verify.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash target_dir=$(mktemp -d) bb_executable="${BB_EXECUTABLE_PATH:-/root/.bb/bb}" diff --git a/barretenberg/sol/scripts/copy_optimized_to_cpp.sh b/barretenberg/sol/scripts/copy_optimized_to_cpp.sh index e4d52652d195..ffe824893ffe 100755 --- a/barretenberg/sol/scripts/copy_optimized_to_cpp.sh +++ b/barretenberg/sol/scripts/copy_optimized_to_cpp.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Script to copy optimized Solidity verifier files into the C++ honk_optimized_contract.hpp file # This automates the manual process of copying optimized verifier contracts diff --git a/barretenberg/sol/scripts/copy_to_cpp.sh 
b/barretenberg/sol/scripts/copy_to_cpp.sh index fc7b4f675306..9e719c200868 100755 --- a/barretenberg/sol/scripts/copy_to_cpp.sh +++ b/barretenberg/sol/scripts/copy_to_cpp.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Script to copy Solidity verifier files into the C++ honk_contract.hpp file # This automates the manual process of copying verifier contracts diff --git a/barretenberg/ts/scripts/browser_postprocess.sh b/barretenberg/ts/scripts/browser_postprocess.sh index 3d802d70a9fa..ad3e1a091c18 100755 --- a/barretenberg/ts/scripts/browser_postprocess.sh +++ b/barretenberg/ts/scripts/browser_postprocess.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash DIR="./dest/browser" diff --git a/barretenberg/ts/scripts/copy_cross.sh b/barretenberg/ts/scripts/copy_cross.sh index 306b86938b34..d94855a3a7df 100755 --- a/barretenberg/ts/scripts/copy_cross.sh +++ b/barretenberg/ts/scripts/copy_cross.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Copies native bb binary and napi module to dest. set -e NO_CD=1 source $(git rev-parse --show-toplevel)/ci3/source diff --git a/barretenberg/ts/scripts/copy_native.sh b/barretenberg/ts/scripts/copy_native.sh index 57ddea46bbb1..3fdd7aa2f10e 100755 --- a/barretenberg/ts/scripts/copy_native.sh +++ b/barretenberg/ts/scripts/copy_native.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Copies native bb binary and napi module to dest. set -e NO_CD=1 source $(git rev-parse --show-toplevel)/ci3/source diff --git a/bootstrap.sh b/bootstrap.sh index cdd88480cebb..db1527165601 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -1,10 +1,173 @@ #!/usr/bin/env bash -# Usage: ./bootstrap.sh " -# full: Bootstrap the repo from scratch. -# fast: Bootstrap the repo using CI cache where possible to save time building. -# check: Check required toolchains and versions are installed. -# clean: Force a complete clean of the repo. Erases untracked files, be careful! -# Use ci3 script base. 
+# Install required dependencies first (and restart your shell): +# ./bootstrap.sh install_deps +# +# Usage: ./bootstrap.sh [cmd]" +# ./bootstrap.sh: Max parallelism. Only use on serious hardware. +# ./bootstrap.sh gentle: Less parallelism. Gentler on hardware. Slow. +# ./bootstrap.sh check: Check required toolchains and versions are installed. +# ./bootstrap.sh clean: Force a complete clean of the repo. Erases untracked files, be careful! + +### TOOLCHAIN INSTALLATIONS ############################################################################################ +# Expected toolchain versions. +export expected_min_clang_version=20.0.0 +export expected_min_cmake_version=3.24 +export expected_min_node_version=24.12.0 +export expected_min_zig_version=0.15.1 +export expected_abs_rust_version=1.89.0 +export expected_abs_wasi_version=27.0 +export expected_abs_foundry_version=1.4.1 +export expected_abs_yarn_version=4.5.2 + +function ensure { + command -v $1 &>/dev/null +} + +function install_wasi_sdk { + if cat /opt/wasi-sdk/VERSION 2> /dev/null | grep $expected_abs_wasi_version > /dev/null; then + return + fi + local arch=$(uname -m) + local os=$(os) + local triple=$expected_abs_wasi_version-$arch-$os + curl -LOs https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${expected_abs_wasi_version%%.*}/wasi-sdk-$triple.tar.gz + tar xzf wasi-sdk-$triple.tar.gz + rm wasi-sdk-$triple.tar.gz + echo "Installing wasi sdk at /opt/wasi-sdk..." + sudo rm -rf /opt/wasi-sdk + sudo mv wasi-sdk-$triple /opt/wasi-sdk +} + +function install_foundry { + curl -L https://foundry.paradigm.xyz | bash + ~/.foundry/bin/foundryup -i $expected_abs_foundry_version +} + +function install_zig { + if ! 
ensure zvm; then + curl -s https://www.zvm.app/install.sh | bash + export PATH="$PATH:$HOME/.zvm/bin" + export PATH="$PATH:$HOME/.zvm/self" + fi + zvm i $expected_min_zig_version +} + +function install_rustup { + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain $expected_abs_rust_version +} + +function install_node { + if ! ensure nvm; then + # Files need to exist if you want nvm installer to update them. + case $SHELL in + */zsh) touch $HOME/.zshrc ;; + */bash) touch $HOME/.bashrc ;; + esac + curl -s -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.4/install.sh | bash + . "$HOME/.nvm/nvm.sh" --no-use + fi + nvm install --lts + nvm alias default lts/* +} + +function install_node_utils { + . "$HOME/.nvm/nvm.sh" + npm i -g corepack solhint +} + +function install_llvm { + wget https://apt.llvm.org/llvm.sh && \ + chmod +x llvm.sh && \ + ./llvm.sh 20 all && \ + rm llvm.sh +} + +function install_yq { + curl -sL https://github.com/mikefarah/yq/releases/download/v4.42.1/yq_linux_$(dpkg --print-architecture) \ + -o $AZTEC_DEV_BIN/yq && chmod +x $AZTEC_DEV_BIN/yq +} + +function install_ldid { + curl -sL https://github.com/ProcursusTeam/ldid/releases/download/v2.1.5-procursus7/ldid_linux_x86_64 \ + -o $AZTEC_DEV_BIN/ldid && chmod +x $AZTEC_DEV_BIN/ldid +} + +export -f install_wasi_sdk install_foundry install_zig install_rustup install_node install_node_utils install_llvm \ + install_yq install_ldid ensure + +function install_linux_deps { + if ! ensure apt; then + echo "Installation requires the apt package manager." + exit 1 + fi + mkdir -p "$AZTEC_DEV_BIN" + spinner "Installing apt dependencies..." "sudo apt install -y jq parallel curl wget zstd redis-tools lsb-release software-properties-common gnupg build-essential cmake ninja-build xxd doxygen" + spinner "Installing llvm..." install_llvm + spinner "Installing yq..." install_yq + spinner "Installing ldid..." install_ldid + spinner "Installing rustup..." 
install_rustup + spinner "Installing wasi-sdk..." install_wasi_sdk + spinner "Installing foundry..." install_foundry + spinner "Installing zig..." install_zig + spinner "Installing node..." install_node + spinner "Installing node utils..." install_node_utils +} + +function install_macos_deps { + # Check if brew is available. + if ! ensure brew; then + echo "Installation requires Homebrew." + echo "Install it from https://brew.sh" + exit 1 + fi + spinner "Installing brew dependencies..." \ + "brew install cmake ninja llvm@20 doxygen coreutils grep gnu-sed parallel yq zstd redis util-linux libusb jq bash" + + # Make clang 20 available. + local llvm_bin="$(brew --prefix)/Cellar/llvm@20/20.1.8/bin" + mkdir -p "$AZTEC_DEV_BIN" + ln -sf "$llvm_bin/clang" "$AZTEC_DEV_BIN/clang-20" + ln -sf "$llvm_bin/clang++" "$AZTEC_DEV_BIN/clang++-20" + ln -sf "$llvm_bin/clang-format" "$AZTEC_DEV_BIN/clang-format-20" + + spinner "Installing wasi-sdk..." install_wasi_sdk + spinner "Installing foundry..." install_foundry + spinner "Installing rustup..." install_rustup + spinner "Installing zig..." install_zig + spinner "Installing node..." install_node + spinner "Installing node utils..." install_node_utils +} + +function install_deps { + case "$(os)" in + linux) install_linux_deps ;; + macos) install_macos_deps ;; + *) + echo -e "${bold}${red}Unknown operating system.${reset}" + echo "We encourage use of our dev container. See build-images/README.md." + exit 1 + ;; + esac + + echo + if [ -t 0 ]; then + echo "Done! Starting fresh shell..." + exec $SHELL + else + echo "Done! You'll need to start a fresh shell to see PATH updates." + echo + fi +} + +# Special case for installing dependencies (can run on older bash). 
+if [ "${1:-}" = "install_deps" ]; then + set -euo pipefail + source $(git rev-parse --show-toplevel)/ci3/source_base + install_deps + exit 0 +fi + +### START OF MAIN BOOTSTRAP SCRIPT ##################################################################################### source $(git rev-parse --show-toplevel)/ci3/source_bootstrap # Enable abbreviated output by default. @@ -22,7 +185,7 @@ export MAKEFLAGS="-j${MAKE_JOBS:-$(get_num_cpus)}" export test_cmds_file="/tmp/test_cmds" export bench_cmds_file="/tmp/bench_cmds" -# Cleanup function. Called on script exit. +### CLEANUP ON EXIT #################################################################################################### function cleanup { set +e if [ -n "${test_engine_pid:-}" ]; then @@ -41,143 +204,97 @@ function cleanup { } trap cleanup EXIT -function encourage_dev_container { - echo -e "${bold}${red}ERROR: Toolchain incompatibility. We encourage use of our dev container. See build-images/README.md.${reset}" +### TOOLCHAIN CHECKS ################################################################################################### +function check_minimum_version { + local min_version=$1 + local installed_version=$2 + if [[ "$(printf '%s\n' "$min_version" "$installed_version" | sort -V | head -n1)" != "$min_version" ]]; then + return 1 + fi + return 0 +} + +function toolchain_incompatible { + if [ "$(os)" == "unknown" ] || [ "$(os)" == "linux" ] && ! ensure apt; then + echo -e "${bold}${red}ERROR: Toolchain incompatibility.${reset}" + echo "We encourage use of our dev container. See build-images/README.md." + else + echo -e "${bold}${red}ERROR: Toolchain incompatibility.${reset}" + echo "You can install requirements with: ./bootstrap.sh install_deps" + fi + exit 1 } # Checks for required utilities, toolchains and their versions. -# Developers should probably use the dev container in /build-images to ensure the smoothest experience. +# DO NOT INSTALL THINGS IN HERE. 
function check_toolchains { # Check for various required utilities. - for util in jq parallel awk git curl zstd; do - if ! command -v $util > /dev/null; then - encourage_dev_container - echo "Utility $util not found." - echo "Installation: sudo apt install $util" - exit 1 + for util in jq parallel awk git curl zstd corepack solhint; do + if ! ensure $util; then + echo "$util not found." + toolchain_incompatible fi done - if ! command -v ldid > /dev/null; then - encourage_dev_container - echo "Utility ldid not found." - echo "Install from https://github.com/ProcursusTeam/ldid." - exit 1 + if [ "$(os)" == "linux" ] && ! ensure ldid; then + echo "ldid not found." + toolchain_incompatible fi if ! yq --version | grep "version v4" > /dev/null; then - encourage_dev_container - echo "yq v4 not installed." - echo "Installation: https://github.com/mikefarah/yq/#install" - exit 1 + echo "yq not found." + toolchain_incompatible fi # Check cmake version. - local cmake_min_version="3.24" local cmake_installed_version=$(cmake --version | head -n1 | awk '{print $3}') - if [[ "$(printf '%s\n' "$cmake_min_version" "$cmake_installed_version" | sort -V | head -n1)" != "$cmake_min_version" ]]; then - encourage_dev_container - echo "Minimum cmake version 3.24 not found." - exit 1 + if ! check_minimum_version $expected_min_cmake_version $cmake_installed_version; then + echo "Minimum cmake version $expected_min_cmake_version not found." + toolchain_incompatible fi # Check clang version. - if ! clang++-20 --version | grep "clang version 20." > /dev/null; then - encourage_dev_container - echo "clang 16 not installed." - echo "Installation: sudo apt install clang-20" - exit 1 + local clang_installed_version=$(clang++-20 --version | head -n1 | awk '{print $4}') + if ! check_minimum_version $expected_min_clang_version $clang_installed_version; then + echo "Minimum clang version $expected_min_clang_version not found." + toolchain_incompatible fi # Check zig version. - if ! 
zig version | grep "0.15.1" > /dev/null; then - encourage_dev_container - echo "zig 0.15.1 not installed." - echo "Install in /opt/zig." - exit 1 + local zig_installed_version=$(zig version) + if ! check_minimum_version $expected_min_zig_version $zig_installed_version; then + echo "Minimum zig version $expected_min_zig_version not found." + toolchain_incompatible fi # Check rustup installed. - local rust_version=$(yq '.toolchain.channel' ./avm-transpiler/rust-toolchain.toml) - if ! command -v rustup > /dev/null; then - encourage_dev_container + if ! ensure rustup; then echo "Rustup not installed." - echo "Installation:" - echo " curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain $rust_version" - exit 1 + toolchain_incompatible fi - if ! rustup show | grep $rust_version > /dev/null; then - if [ "${CI:-0}" -eq 1 ]; then - echo "Attempting install of required Rust version $rust_version" - rustup self update 2>/dev/null || true - rustup toolchain install $rust_version - rustup default $rust_version - else - # Cargo will download necessary version of rust at runtime but warn to alert that an update to the build-image - # is desirable. - echo -e "${bold}${yellow}WARN: Rust ${rust_version} is not installed. Performance will be degraded.${reset}" - fi + if ! rustup show | grep $expected_abs_rust_version > /dev/null; then + # Cargo will download necessary version of rust at runtime but warn to update the build-image. + echo -e "${bold}${yellow}WARN: Rust ${expected_abs_rust_version} is not installed. Update build-image.${reset}" fi # Check wasi-sdk version. - if ! cat /opt/wasi-sdk/VERSION 2> /dev/null | grep 27.0 > /dev/null; then - encourage_dev_container - echo "wasi-sdk-27 not found at /opt/wasi-sdk." 
- echo "Use dev container, build from source, or you can install linux x86 version with:" - echo " curl -s -L https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-27/wasi-sdk-27.0-x86_64-linux.tar.gz | tar zxf - && sudo mv wasi-sdk-27.0-x86_64-linux /opt/wasi-sdk" - exit 1 + if ! cat /opt/wasi-sdk/VERSION 2> /dev/null | grep $expected_abs_wasi_version > /dev/null; then + toolchain_incompatible fi # Check foundry version. - local foundry_version="v1.4.1" for tool in forge anvil; do - if ! $tool --version 2> /dev/null | grep "${foundry_version#nightly-}" > /dev/null; then - echo "$tool not in PATH or incorrect version (requires $foundry_version)." - if [ "${CI:-0}" -eq 1 ]; then - echo "Attempting install of required foundry version $foundry_version" - curl -L https://foundry.paradigm.xyz | bash - ~/.foundry/bin/foundryup -i $foundry_version - else - encourage_dev_container - echo "Installation: https://book.getfoundry.sh/getting-started/installation" - echo " curl -L https://foundry.paradigm.xyz | bash" - echo " foundryup -i $foundry_version" - exit 1 - fi + if ! $tool --version 2> /dev/null | grep "$expected_abs_foundry_version" > /dev/null; then + echo "$tool version $expected_abs_foundry_version not found." + toolchain_incompatible fi done # Check Node.js version. - local node_min_version="24.12.0" local node_installed_version=$(node --version | cut -d 'v' -f 2) - if [[ "$(printf '%s\n' "$node_min_version" "$node_installed_version" | sort -V | head -n1)" != "$node_min_version" ]]; then - encourage_dev_container - echo "Minimum Node.js version $node_min_version not found (got $node_installed_version)." - echo "Installation: nvm install $node_min_version" - exit 1 + if ! check_minimum_version $expected_min_node_version $node_installed_version; then + echo "Minimum node version $expected_min_node_version not found." + toolchain_incompatible + fi + # Check yarn version. This catches oddities like an overriding .yarnrc.yml outside the repo. 
+ if [ "$expected_abs_yarn_version" != "$(corepack yarn@$expected_abs_yarn_version --version)" ]; then + echo "Yarn version $expected_abs_yarn_version not found. Check for a rogue .yarnrc.yml in e.g. home directory." + toolchain_incompatible fi - # Check for required npm globals. - for util in corepack solhint; do - if ! command -v $util > /dev/null; then - encourage_dev_container - echo "$util not found." - echo "Installation: npm install --global $util" - exit 1 - fi - done -} - -function versions { - local noir_version anvil_version node_version cmake_version clang_version zig_version rustc_version wasi_sdk_version - noir_version=$(git -C noir/noir-repo describe --tags --always HEAD) - anvil_version=$(anvil --version | head -n1 | sed -E 's/anvil Version: ([0-9.]+).*/\1/') - node_version=$(node --version | cut -d 'v' -f 2) - cmake_version=$(cmake --version | head -n1 | cut -d' ' -f3) - clang_version=$(clang++-20 --version | head -n1 | cut -d' ' -f4) - zig_version=$(zig version) - rustc_version=$(rustc --version | cut -d' ' -f2) - wasi_sdk_version=$(cat /opt/wasi-sdk/VERSION 2> /dev/null | head -n1) - echo "noir: $noir_version" - echo "foundry: $anvil_version" - echo "node: $node_version" - echo "cmake: $cmake_version" - echo "clang: $clang_version" - echo "zig: $zig_version" - echo "rustc: $rustc_version" - echo "wasi-sdk: $wasi_sdk_version" } +### BUILDING AND TESTING ############################################################################################### # Install pre-commit git hooks. function install_hooks { hooks_dir=$(git rev-parse --git-path hooks) @@ -210,7 +327,7 @@ function pull_submodules { echo "Removing old noir clone..." 
rm -rf noir/noir-repo fi - denoise "git submodule update --init --recursive --depth 1 --jobs 8 && git -C noir/noir-repo fetch --tags" + denoise "git submodule update --init --recursive --depth 1 --jobs 8 && git -C noir/noir-repo fetch --tags &>/dev/null" } function start_txes { @@ -361,6 +478,27 @@ function bench { cache_upload bench-$(git rev-parse HEAD^{tree}).tar.gz bench-out/bench.json } +### RELEASING ########################################################################################################## +function versions { + local noir_version anvil_version node_version cmake_version clang_version zig_version rustc_version wasi_sdk_version + noir_version=$(git -C noir/noir-repo describe --tags --always HEAD) + anvil_version=$(anvil --version | head -n1 | sed -E 's/anvil Version: ([0-9.]+).*/\1/') + node_version=$(node --version | cut -d 'v' -f 2) + cmake_version=$(cmake --version | head -n1 | cut -d' ' -f3) + clang_version=$(clang++-20 --version | head -n1 | cut -d' ' -f4) + zig_version=$(zig version) + rustc_version=$(rustc --version | cut -d' ' -f2) + wasi_sdk_version=$(cat /opt/wasi-sdk/VERSION 2> /dev/null | head -n1) + echo "noir: $noir_version" + echo "foundry: $anvil_version" + echo "node: $node_version" + echo "cmake: $cmake_version" + echo "clang: $clang_version" + echo "zig: $zig_version" + echo "rustc: $rustc_version" + echo "wasi-sdk: $wasi_sdk_version" +} + function release_github { # Add an easy link for comparing to previous release. local compare_link="" @@ -394,19 +532,9 @@ function release_github { } function release { - # Our releases are controlled by the REF_NAME environment variable, which should be a valid semver (but can have a leading v). + # Releases are triggered when REF_NAME is a valid semver (but can have a leading v). # We ensure there is a github release for our REF_NAME, if not on latest (in which case release-please creates it). # We derive a dist tag from our prerelease portion of our REF_NAME semver. 
It is latest if no prerelease. - # Our steps: - # barretenberg/cpp => upload binaries to github release - # barretenberg/ts - # + noir - # + yarn-project => NPM publish to dist tag, version is our REF_NAME without a leading v. - # aztec-up => upload scripts to prod if dist tag is latest - # playground => publish if dist tag is latest. - # release-image => push docker image to dist tag. - # boxes/l1-contracts/aztec-nr => mirror repo to branch equal to dist tag (master if latest). Also mirror to tag equal to REF_NAME. - echo_header "release all" set -x @@ -442,6 +570,49 @@ function release_dryrun { DRY_RUN=1 release } +### SELF TESTING ####################################################################################################### +function test_bootstrap_linux { + local name=linux-bootstrap-test-ubuntu + docker volume rm $name-volume &>/dev/null || true + trap "docker volume rm $name-volume &>/dev/null" EXIT + docker run --rm -ti --name $name \ + --cpus=32 \ + --ulimit nofile=1048576:1048576 \ + -v $root:/aztec-packages:ro \ + --mount type=volume,src=$name-volume,dst=/root/aztec-packages \ + -w /root \ + ubuntu:24.04 bash -c " +set -euo pipefail +ulimit -n 65536 +apt update && apt install -y git sudo +git config --global --add safe.directory /aztec-packages/.git +git clone --branch=$(git branch --show-current) /aztec-packages +cd aztec-packages +./bootstrap.sh install_deps /dev/null; do sleep 0.5; done + /mnt/user-data/macos/ssh.sh $name bash -c 'cat > /tmp/mac_bootstrap.sh' </dev/null wait $cmd_pid 2>/dev/null cmd_pid= fi + if [ -n "$awk_pid" ]; then + wait $awk_pid 2>/dev/null + awk_pid= + fi exit 130 } -trap cleanup SIGTERM SIGINT +trap on_signal SIGTERM SIGINT # Compute color from job name hash. 
# Picks a color between 20 and 231 (avoiding very dark/light colors) @@ -45,15 +48,29 @@ function compute_color { color=$(compute_color "$job_name") -# Execute command with output going to process substitution for prefixing -# Run in background so we can capture PID, then wait for it -stdbuf -oL -eL bash -c "$command" > >( - while IFS= read -r line; do - printf '\033[38;5;%sm[%s]\033[0m %s\n' "$color" "$job_name" "$line" - done -) 2>&1 & +# Named pipe so we can track both PIDs independently: +# kill cmd → EOF propagates through fifo → awk drains and exits. +prefix_fifo=$(mktemp -u) +mkfifo "$prefix_fifo" +trap 'rm -f $prefix_fifo' EXIT + +# Awk prefixer reads from fifo, writes to our stdout. +awk -v c="$color" -v n="$job_name" \ + '{printf "\033[38;5;%sm[%s]\033[0m %s\n", c, n, $0; fflush()}' \ + < "$prefix_fifo" & +awk_pid=$! + +# Command writes into the fifo. +stdbuf -oL -eL bash -c "$command" > "$prefix_fifo" 2>&1 & cmd_pid=$! -# Wait for command and capture exit code +# Wait for command and capture exit code. wait $cmd_pid 2>/dev/null -exit $? +code=$? +cmd_pid= + +# Command exited; EOF propagates through fifo. Wait for awk to drain. +wait $awk_pid 2>/dev/null +awk_pid= + +exit $code diff --git a/ci3/cpuset_count b/ci3/cpuset_count index ac8e978abef1..3258da527a66 100755 --- a/ci3/cpuset_count +++ b/ci3/cpuset_count @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Given a valid cpuset string, count the number of CPUs. 
count=0 diff --git a/ci3/dashboard/chonk-breakdowns/run-local.sh b/ci3/dashboard/chonk-breakdowns/run-local.sh index ca5d3578512c..5e8176f19448 100755 --- a/ci3/dashboard/chonk-breakdowns/run-local.sh +++ b/ci3/dashboard/chonk-breakdowns/run-local.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Run rk.py locally for testing # This script sets up the environment to use local test data diff --git a/ci3/dashboard/rk.py b/ci3/dashboard/rk.py index 12b9a414077f..277092cbbd68 100644 --- a/ci3/dashboard/rk.py +++ b/ci3/dashboard/rk.py @@ -26,7 +26,7 @@ S3_LOGS_PREFIX = os.getenv('S3_LOGS_PREFIX', 'logs') _s3 = boto3.client('s3', region_name='us-east-2') -DASHBOARD_PASSWORD = os.getenv('DASHBOARD_PASSWORD', 'password') +DASHBOARD_PASSWORD = os.getenv('DASHBOARD_PASSWORD', '') CI_METRICS_PORT = int(os.getenv('CI_METRICS_PORT', '8081')) CI_METRICS_URL = os.getenv('CI_METRICS_URL', f'http://localhost:{CI_METRICS_PORT}') @@ -71,6 +71,12 @@ # Another worker already holds the lock — nothing to do pass +# Conditional auth decorator - only require auth if password is set +def optional_auth(f): + if DASHBOARD_PASSWORD: + return auth.login_required(f) + return f + def read_from_disk(key): """Read log from disk.""" try: @@ -134,7 +140,7 @@ def verify_password(username, password): def convert_to_ocs8(text): # Replace URLs not already part of an OCS8 link using negative lookbehind. - pattern = r'(? str: """ @app.route('/') -@auth.login_required +@optional_auth def show_root(): return render_template_string( TEMPLATE, @@ -383,7 +389,7 @@ def show_root(): ) @app.route('/section/
') -@auth.login_required +@optional_auth def show_section(section): return render_template_string( TEMPLATE, @@ -394,14 +400,14 @@ def show_section(section): ) @app.route('/list/') -@auth.login_required +@optional_auth def get_list(key): value = get_list_as_string(key) follow = request.args.get('follow', 'top') return render_template_string(TEMPLATE, value=ansi_to_html(value), follow=follow, filter_str='', filter_prop='') @app.route('/chonk-breakdowns') -@auth.login_required +@optional_auth def chonk_breakdowns(): """Serve the chonk breakdowns viewer page.""" breakdown_html_path = Path('chonk-breakdowns/breakdown-viewer.html') @@ -412,7 +418,7 @@ def chonk_breakdowns(): return "Breakdown viewer not found", 404 @app.route('/api/breakdown/flows') -@auth.login_required +@optional_auth def list_available_flows(): """API endpoint to list available breakdown flows from disk, filtered by runtime and SHA.""" runtime = request.args.get('runtime') @@ -448,7 +454,7 @@ def list_available_flows(): return Response(json.dumps(sorted(list(flows))), mimetype='application/json') @app.route('/api/breakdown///') -@auth.login_required +@optional_auth def get_breakdown(runtime, flow_name, sha): """API endpoint to fetch breakdown JSON from disk.""" breakdown_data = read_breakdown_from_disk(runtime, flow_name, sha) @@ -459,7 +465,7 @@ def get_breakdown(runtime, flow_name, sha): @app.route('/grind') -@auth.login_required +@optional_auth def trigger_grind(): """Trigger a grind job for a flaky test.""" from urllib.parse import urlencode as url_encode @@ -607,7 +613,7 @@ def proxy_api(path): return _proxy(f'/api/{path}') @app.route('/') -@auth.login_required +@optional_auth def get_value(key): # Check if raw text format is requested raw_text = key.endswith('.txt') diff --git a/ci3/denoise b/ci3/denoise index 751484af2d9e..1766e6dd4620 100755 --- a/ci3/denoise +++ b/ci3/denoise @@ -1,7 +1,5 @@ #!/usr/bin/env bash NO_CD=1 source ${root:-$(git rev-parse --show-toplevel)}/ci3/source -source 
$ci3/source_redis -source $ci3/source_cache # Ensure a command is passed if [ "$#" != 1 ]; then @@ -34,10 +32,15 @@ function cleanup { wait $tail_pid tail_pid= fi + if [ -n "${dots_pid:-}" ]; then + kill $dots_pid &>/dev/null + wait $dots_pid + dots_pid= + fi } trap cleanup SIGINT SIGTERM # Remove file on exit, so we can publish even after SIGINT or SIGTERM. -trap 'cleanup; rm -f $outfile' EXIT +trap 'cleanup; rm -f $outfile $dots_fifo' EXIT dots_per_line=${DENOISE_WIDTH:-64} dot_count=0 @@ -51,7 +54,7 @@ export FORCE_COLOR=${FORCE_COLOR:-1} [ -t 1 ] && realtime=1 || realtime=0 key=$(uuid) -url=http://ci.aztec-labs.com/$key +url=$DASHBOARD_URL/$key outfile=/tmp/$key touch $outfile @@ -60,6 +63,7 @@ function format_log_output { echo -e "Parent Log: $(ci_term_link $PARENT_LOG_ID)" fi echo "Command: ${DENOISE_DISPLAY_NAME:-$cmd}" + echo "Working Dir: ${PWD:-}" echo "Date: $(date)" echo "Status: $status" echo "Took: ${time}" @@ -92,25 +96,29 @@ if [ "$CI_REDIS_AVAILABLE" -eq 1 ]; then log_info="(${yellow}${url}${reset})" fi -# Dot logging. +# Dot logging via named pipe so we can track both PIDs. set +e display_cmd="${DENOISE_DISPLAY_NAME:-$cmd}" echo -e "Executing: $display_cmd ${log_info:-}" echo -n " 0 " -tail --sleep-interval=0.2 -n +1 -f "$outfile" > >( - while IFS= read -r line; do +dots_fifo=$(mktemp -u) +mkfifo "$dots_fifo" + +tail --sleep-interval=0.2 -n +1 -f "$outfile" > "$dots_fifo" & +tail_pid=$! + +while IFS= read -r line; do dot_count=$((dot_count+1)) [ $realtime -eq 1 ] && printf "." if [[ "$dots_per_line" -gt 0 && $((dot_count % dots_per_line)) -eq 0 ]]; then [ $realtime -eq 0 ] && printf '%.s.' $(seq 1 "$dots_per_line") printf "\n%4s " "$dot_count" fi - done -) & -tail_pid=$! +done < "$dots_fifo" & +dots_pid=$! # Execute the command in background. -PARENT_LOG_ID=$key bash -c "$cmd" 2>&1 | redact > $outfile & +PARENT_LOG_ID=$key bash -c "set -euo pipefail; $cmd" 2>&1 | redact > $outfile & job_pid=$! # Wait for the job to finish and get its exit status. 
@@ -121,13 +129,20 @@ time="${SECONDS}s" [ -n "${publish_pid:-}" ] && kill $publish_pid &>/dev/null || true publish_log_final +# Kill tail (source), EOF propagates through fifo, dots loop drains and exits. +kill $tail_pid &>/dev/null +wait $tail_pid 2>/dev/null +tail_pid= +wait $dots_pid 2>/dev/null +dots_pid= + # Handle non-zero exit status if [ "$status" -ne 0 ]; then # Print "interrupted" on SIGINT or SIGTERM. if [ "$status" -eq 143 ] || [ "$status" -eq 130 ]; then echo -e ". ${yellow}interrupted${reset} ($time)" else - if [ -t 1 ]; then + if [ -t 1 ] || [ "${DUMP_FAIL:-0}" -eq 1 ]; then echo -e "\nCommand exited with status $status. Dumping output:" cat $outfile fi diff --git a/ci3/exec_test b/ci3/exec_test index 20796e06535c..47786a450303 100755 --- a/ci3/exec_test +++ b/ci3/exec_test @@ -36,7 +36,7 @@ Env: REF_NAME=${REF_NAME:-} CURRENT_VERSION=${CURRENT_VERSION:-} CI_FULL=${CI_FU Date: $(date) System: ARCH=$(arch) CPUS=$(nproc --all) MEM=$(free -h | awk '/^Mem:/{print $2}') HOSTNAME=$(hostname) Resources: CPU_LIST=$CPU_LIST CPUS=$CPUS MEM=$MEM TIMEOUT=$TIMEOUT -History: http://ci.aztec-labs.com/list/history_$test_hash${TARGET_BRANCH:+_$TARGET_BRANCH} +History: http://$DASHBOARD_URL/list/history_$test_hash${TARGET_BRANCH:+_$TARGET_BRANCH} EOF diff --git a/ci3/filter_cached_test_cmd b/ci3/filter_cached_test_cmd index 04f288c05c92..4ca27c290f97 100755 --- a/ci3/filter_cached_test_cmd +++ b/ci3/filter_cached_test_cmd @@ -1,6 +1,5 @@ #!/usr/bin/env bash NO_CD=1 source $(git rev-parse --show-toplevel)/ci3/source -source $ci3/source_redis function process_batch { set -euo pipefail diff --git a/ci3/grind_test b/ci3/grind_test index 67a730f751ba..b4e4a111b340 100755 --- a/ci3/grind_test +++ b/ci3/grind_test @@ -5,7 +5,6 @@ { NO_CD=1 source $(git rev-parse --show-toplevel)/ci3/source -source $ci3/source_redis full_cmd="$1" timeout="${2:-10m}" diff --git a/ci3/log_ci_run b/ci3/log_ci_run index b52b93256edc..1ad1a64d72db 100755 --- a/ci3/log_ci_run +++ b/ci3/log_ci_run 
@@ -1,8 +1,5 @@ #!/usr/bin/env bash NO_CD=1 source $(git rev-parse --show-toplevel)/ci3/source -source $ci3/source_refname -source $ci3/source_redis -source $ci3/source_color status=${1:-RUNNING} key=${2:-} diff --git a/ci3/memsuspend_limit b/ci3/memsuspend_limit index d889f6f20c13..1c1678faa282 100755 --- a/ci3/memsuspend_limit +++ b/ci3/memsuspend_limit @@ -4,9 +4,13 @@ set -eu # This informs the --memsuspend flag for GNU parallel. # Per the documentation: # --memsuspend size -# If the available memory falls below 2 * size, GNU parallel will suspend some of the running jobs. If the available memory falls below size, -# only one job will be running. -# We aim for one fourth of the total memory, to take at most half the memory when running tests (2 * size). +# If the available memory falls below 2 * size, GNU parallel will suspend some of the running jobs. +# If the available memory falls below size, only one job will be running. +# We set memsuspend value to 1/4 the total memory. +# So suspension will start when we exceed 1/2 available memory. +# Only 1 job will be running when we exceed 3/4 available memory. +# If the system has >= 256GB, we act like we have 256GB (set to 64GB). + if [ -n "${MEMSUSPEND:-}" ]; then echo $MEMSUSPEND exit diff --git a/ci3/npm_install_deps b/ci3/npm_install_deps index 30a94c579905..37a797aae107 100755 --- a/ci3/npm_install_deps +++ b/ci3/npm_install_deps @@ -29,7 +29,7 @@ if [ "$CI" -eq 1 ] && [ -f $HOME/ci-started ] && [ "${NO_CACHE:-0}" -eq 0 ]; the fi else if [ "$CI" -ne 1 ]; then - unset CI + export YARN_ENABLE_IMMUTABLE_INSTALLS=false fi denoise "yarn install" fi diff --git a/ci3/os b/ci3/os index 4d17a94fa081..1319c3a8b266 100755 --- a/ci3/os +++ b/ci3/os @@ -3,5 +3,5 @@ set -e case "$(uname -s)" in Linux) echo linux ;; Darwin) echo macos ;; - *) echo "Unsupported OS: $(uname -s). Only linux and macos are supported." 
>&2; exit 1 ;; + *) echo "unknown" ;; esac diff --git a/ci3/run_test_cmd b/ci3/run_test_cmd index 1c55db098f41..ea46f5071e3b 100755 --- a/ci3/run_test_cmd +++ b/ci3/run_test_cmd @@ -5,9 +5,6 @@ # - Redis publishing (started/failed/flaked events) # - Flake detection and owner notification NO_CD=1 source $(git rev-parse --show-toplevel)/ci3/source -source $ci3/source_redis -source $ci3/source_cache -source $ci3/source_refname # We must enable job control to ensure our test runs in it's own process group. # Otherwise, when parallel sends a TERM to the group, it will also kill any child process of the test. @@ -173,7 +170,7 @@ if [ "$publish" -eq 1 ]; then --arg status "started" \ --arg test_cmd "$cmd" \ --arg log_id "$log_key" \ - --arg log_url "http://ci.aztec-labs.com/$log_key" \ + --arg log_url "$DASHBOARD_URL/$log_key" \ --arg ref_name "${TARGET_BRANCH:-$REF_NAME}" \ --arg commit_hash "$COMMIT_HASH" \ --arg commit_author "$COMMIT_AUTHOR" \ @@ -326,7 +323,7 @@ function fail { [ "$CI" -eq 1 ] && post_github_failure_status if [ "$slack_notify_fail" -eq 1 ]; then - slack_notify "Test FAILED on *${TARGET_BRANCH:-$REF_NAME}*: \`$test_cmd\` http://ci.aztec-labs.com/$log_key" "#alerts-next-scenario" + slack_notify "Test FAILED on *${TARGET_BRANCH:-$REF_NAME}*: \`$test_cmd\` $DASHBOARD_URL/$log_key" "#alerts-next-scenario" fi exit $code @@ -357,7 +354,7 @@ function flake { for uid in $owners; do slack_uids+="<@$uid> " done - slack_notify "${slack_uids% }: Test flaked on *${TARGET_BRANCH:-$REF_NAME}*: \`$test_cmd\` http://ci.aztec-labs.com/$log_key" + slack_notify "${slack_uids% }: Test flaked on *${TARGET_BRANCH:-$REF_NAME}*: \`$test_cmd\` $DASHBOARD_URL/$log_key" fi exit 0 diff --git a/ci3/sem_sched b/ci3/sem_sched index 91372d144a26..83f9b7b822ae 100755 --- a/ci3/sem_sched +++ b/ci3/sem_sched @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash source $(git rev-parse --show-toplevel)/ci3/source # Configuration diff --git a/ci3/slack_notify b/ci3/slack_notify index 
8d82cf8e3feb..98a249aff5c2 100755 --- a/ci3/slack_notify +++ b/ci3/slack_notify @@ -1,6 +1,5 @@ #!/usr/bin/env bash NO_CD=1 source $(git rev-parse --show-toplevel)/ci3/source -source $ci3/source_redis if [ -z "${SLACK_BOT_TOKEN:-}" ]; then exit 0 diff --git a/ci3/source b/ci3/source index c53bb28b3828..baddc6bc5941 100644 --- a/ci3/source +++ b/ci3/source @@ -1,26 +1,13 @@ # Usage: source $(git rev-parse --show-toplevel)/ci3/source # This is a basis for shell scripts that use the ci3 framework. -# This can be sourced multiple by scripts calling scripts, so it makes sense to only do certain sets through first pass. -# Enter our script directory, allowing usage of scripts from any directory. -[ -z "${NO_CD:-}" ] && cd "$(dirname $0)" - -# We export so we can use from exported functions. export root=${root:-$(git rev-parse --show-toplevel)} export ci3="$root/ci3" -if [[ "$PATH" != *"$ci3:" ]]; then - export PATH=$ci3:$PATH - if [ "${GITHUB_ACTIONS:-}" = "true" ]; then - export PATH=$ci3/bin:$PATH - fi -fi - -[ -z "${CI:-}" ] && export CI=0 -[ "$CI" == "true" ] && export CI=1 - -# We are fine using foundry nightly. -export FOUNDRY_DISABLE_NIGHTLY_WARNING=1 +source $ci3/source_base source $ci3/source_options -source $ci3/source_stdlib source $ci3/source_color +source $ci3/source_stdlib +source $ci3/source_redis +source $ci3/source_refname +source $ci3/source_cache diff --git a/ci3/source_base b/ci3/source_base new file mode 100644 index 000000000000..0c3b0a00bc1c --- /dev/null +++ b/ci3/source_base @@ -0,0 +1,53 @@ +# Most basic setup. +# Should work on old versions of bash. + +# Enter our script directory, allowing usage of scripts from any directory. +[ -z "${NO_CD:-}" ] && cd "$(dirname $0)" + +# We export so we can use from exported functions. +export root=${root:-$(git rev-parse --show-toplevel)} +export ci3="$root/ci3" + +# Update PATH with ci3 tools. 
+if [[ "$PATH" != *"$ci3:" ]]; then + export PATH=$ci3:$PATH + + # We're on the github runner, we're about to launch ec2 instances with our container, but we need: + # - redis-cli for logging. + # - parallel for running commands in parallel. + if [ "${GITHUB_ACTIONS:-}" = "true" ]; then + export PATH=$ci3/bin:$PATH + fi +fi + +# We use this path to normalise tool names. +# e.g. on macos we link brews llvm clang++ binary to clang++20 to align with linux. +export AZTEC_DEV_BIN="$HOME/.local/aztec-dev-bin" +export PATH=$AZTEC_DEV_BIN:$PATH + +# On macOS, prefer GNU tools where available for better compatibility with our scripts. +if [[ "$(os)" == "macos" ]]; then + brew_prefix=$(brew --prefix) + export PATH=$brew_prefix/opt/gnu-sed/libexec/gnubin:$brew_prefix/opt/coreutils/libexec/gnubin:$brew_prefix/opt/util-linux/bin:$brew_prefix/opt/grep/libexec/gnubin:$PATH +fi + +[ -z "${CI:-}" ] && export CI=0 +[ "$CI" == "true" ] && export CI=1 + +# We are fine using foundry nightly. +export FOUNDRY_DISABLE_NIGHTLY_WARNING=1 + +# Corepack should download without prompting. +export COREPACK_ENABLE_DOWNLOAD_PROMPT=0 + +# Silences some warnings from perl. +export LC_ALL= +export LANG=C + +# GNU parallel sets XDG_CACHE_HOME to empty string in child processes. +# This causes corepack to resolve its cache as a relative path (node/corepack/), +# placing yarn.js under the CWD. If a package.json there has "type": "module", +# Node loads yarn.js as ESM, breaking its require() calls. +# Setting COREPACK_HOME explicitly is robust: corepack checks it first, +# and parallel doesn't touch it (only XDG_* vars). +export COREPACK_HOME="${COREPACK_HOME:-${HOME}/.cache/node/corepack}" diff --git a/ci3/source_bootstrap b/ci3/source_bootstrap index 1e9fd282b49d..81da41658649 100644 --- a/ci3/source_bootstrap +++ b/ci3/source_bootstrap @@ -1,8 +1,19 @@ # Source this first in all bootstrap scripts. 
# Usage: source $(git rev-parse --show-toplevel)/ci3/source_bootstrap + +if (( BASH_VERSINFO[0] < 5 )); then + echo "Error: Bash 5 or newer is required." >&2 + if [ "$(uname -s)" == "Darwin" ]; then + echo "On macOS, you can install it with Homebrew: brew install bash" >&2 + fi + exit 1 +fi + source $(git rev-parse --show-toplevel)/ci3/source -source $ci3/source_refname -source $ci3/source_redis + +# Silence parallel citation message. +mkdir -p $HOME/.parallel +touch $HOME/.parallel/will-cite cmd=${1:-} [ -n "$cmd" ] && shift diff --git a/ci3/source_color b/ci3/source_color index 0ef54be3316e..b23b8c52c492 100644 --- a/ci3/source_color +++ b/ci3/source_color @@ -1,52 +1,23 @@ # Color codes for output. -# Whatever the terminal defines. -# export yellow="\033[93m" -# export blue="\033[34m" -# export green="\033[32m" -# export red="\033[31m" -# export purple="\033[35m" - # Quite nice colors guessed by GPT. export yellow="\033[38;2;250;217;121m" +export orange="\033[38;2;255;165;0m" export blue="\033[38;2;95;167;241m" export green="\033[38;2;97;214;104m" export red="\033[38;2;230;69;83m" export purple="\033[38;2;188;109;208m" -# Copied from iTerm2 Pastel Dark Background -# export yellow="\033[38;2;255;253;201m" -# export blue="\033[38;2;175;212;250m" -# export green="\033[38;2;195;249;133m" -# export red="\033[38;2;239;138;121m" -# export purple="\033[38;2;240;150;248m" - export bold="\033[1m" export reset="\033[0m" +# Cursor control +export hide_cursor="\033[?25l" +export show_cursor="\033[?25h" + # OSC 8 support for links. 
export link_open='\033]8;;' export link_close='\x07' -function term_link { - echo -e "${yellow}${link_open}$1${link_close}$2${link_open}${link_close}${reset}" -} - -function ci_term_link { - term_link "http://ci.aztec-labs.com/$1" "$1" -} - -function pr_link { - local line=$1 - if [[ $line =~ \(#([0-9]+)\) ]]; then - pr_number="${BASH_REMATCH[1]}" - link=$(term_link https://github.com/aztecprotocol/aztec-packages/pull/$pr_number $pr_number) - echo -e "${line/\(#${pr_number}\)/(#${link})}" - else - echo -e "$line" - fi -} -export -f term_link ci_term_link pr_link - # We always want color. export FORCE_COLOR=true diff --git a/ci3/source_options b/ci3/source_options index 62cc3f2d26c9..21ad8ccc8050 100644 --- a/ci3/source_options +++ b/ci3/source_options @@ -1,6 +1,3 @@ -# If debugging we enable bash output. -[ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x || true - # Set best practices for early error exit and undefined variables. set -euo pipefail diff --git a/ci3/source_redis b/ci3/source_redis index 61cfacf14233..3ecc52a51590 100644 --- a/ci3/source_redis +++ b/ci3/source_redis @@ -23,20 +23,45 @@ if [ -z "${CI_REDIS_AVAILABLE:-}" ]; then else echo_stderr -e "${yellow}Failed to establish connection to remote redis. Log and test cache will be disabled.${reset}" fi - elif command -v docker &> /dev/null; then + elif docker ps &> /dev/null && [ "$CI" -eq 0 ]; then echo_stderr "Starting local redis container..." - docker run -d --rm --name ci-redis -p 6379:6379 redis:latest &>/dev/null && true + docker network create ci-network &>/dev/null || true + docker run -d --rm --name ci-redis -p 6379:6379 --network ci-network redis:latest redis-server --save "" --appendonly no &>/dev/null || true if nc -z $CI_REDIS 6379 &>/dev/null; then export CI_REDIS_AVAILABLE=1 else echo_stderr -e "${yellow}Failed to start a local redis. 
Log and test cache will be disabled.${reset}" fi + elif [ "$CI" -eq 0 ]; then + echo_stderr -e "${yellow}Cannot connect to remote redis, and no docker available for local redis. Log and test cache will be disabled.${reset}" + fi +fi + +# If we haven't yet checked for dashboard availability. +if [ -z "${DASHBOARD_URL:-}" ]; then + # By default, log links will go to the labs ci dashboard. + # We override to localhost if we're running local redis. + export DASHBOARD_URL="http://ci.aztec-labs.com" + + if command -v docker &>/dev/null && docker ps | grep -q "ci-redis"; then + if [ -z "$(docker ps -q --filter "name=^/ci-dashboard$")" ]; then + echo_stderr "Building local ci dashboard..." + (cd $ci3/dashboard && docker build -t aztecprotocol/ci-dashboard:latest .) &>/dev/null + echo_stderr "Starting local ci dashboard..." + docker network create ci-network &>/dev/null || true + docker run -d --rm --name ci-dashboard -p 4273:8080 \ + -e REDIS_HOST=ci-redis \ + -e REDIS_PORT=6379 \ + -e DASHBOARD_PORT=4273 \ + --network ci-network \ + aztecprotocol/ci-dashboard:latest &>/dev/null + fi + export DASHBOARD_URL="http://localhost:4273" fi fi # If redis isn't available, neither is the test cache. if [ "$CI_REDIS_AVAILABLE" -eq 0 ]; then - echo_stderr -e "${yellow}Redis unavailable. 
Disabling test cache.${reset}" export USE_TEST_CACHE=0 fi diff --git a/ci3/source_stdlib b/ci3/source_stdlib index 49c7edcbb3be..5416a89a0672 100644 --- a/ci3/source_stdlib +++ b/ci3/source_stdlib @@ -33,6 +33,8 @@ function get_num_cpus { else echo $((cpu_quota / cpu_period)) fi + elif [[ "$(uname)" == "Darwin" ]]; then + sysctl -n hw.ncpu else nproc fi @@ -52,4 +54,23 @@ function urlencode { printf '%s' "$1" | python3 -c "import urllib.parse, sys; print(urllib.parse.quote(sys.stdin.read(), safe=''))" } -export -f hash_str hash_str_orig echo_stderr uuid get_num_cpus get_num_cpus_max urlencode +function term_link { + echo -e "${yellow}${link_open}$1${link_close}$2${link_open}${link_close}${reset}" +} + +function ci_term_link { + term_link "$DASHBOARD_URL/$1" "$1" +} + +function pr_link { + local line=$1 + if [[ $line =~ \(#([0-9]+)\) ]]; then + pr_number="${BASH_REMATCH[1]}" + link=$(term_link https://github.com/aztecprotocol/aztec-packages/pull/$pr_number $pr_number) + echo -e "${line/\(#${pr_number}\)/(#${link})}" + else + echo -e "$line" + fi +} + +export -f hash_str hash_str_orig echo_stderr uuid get_num_cpus get_num_cpus_max urlencode term_link ci_term_link pr_link diff --git a/ci3/spinner b/ci3/spinner new file mode 100755 index 000000000000..a28e7ad86302 --- /dev/null +++ b/ci3/spinner @@ -0,0 +1,77 @@ +#!/usr/bin/env bash +# Shows a spinner while running a command. +# Usage: spinner +# In interactive mode: prints message + spinner, then "done." in green on success +# In non-interactive mode: just prints message and newline +# On failure: outputs dump_fail style error info +set -uo pipefail +source $(git rev-parse --show-toplevel)/ci3/source_color + +if [ "$#" -ne 2 ]; then + echo "Usage: $0 " + exit 1 +fi + +message="$1" +command="$2" + +# Check if we're in a terminal +if [ ! -t 1 ]; then + # Non-interactive mode: just print message and run command + echo "$message" + bash -c "set -euo pipefail; $command" + exit $? 
+fi + +# Interactive mode: show spinner +echo -n "$message " + +# Spinner characters (braille circle worm) +spinner_chars=(⠋ ⠙ ⠹ ⠸ ⠼ ⠴ ⠦ ⠧ ⠇ ⠏) + +# Hide cursor and ensure it's restored on exit +printf "$hide_cursor" + +# Run command in background, capturing output +output=$(mktemp) +trap "printf '$show_cursor'; rm -f $output" EXIT + +bash -c "set -euo pipefail; $command" >$output 2>&1 & +pid=$! +trap 'kill -SIGTERM $pid &>/dev/null || true' SIGTERM + +# Show spinner while command runs +i=0 +while kill -0 $pid 2>/dev/null; do + i=$(( (i+1) %10 )) + printf "\r$message ${orange}${spinner_chars[$i]}${reset}" + sleep 0.1 +done + +# Wait for the command to complete and get status +# We wait twice, as the first awakening might be due to this script receiving SIGTERM. +# In that case wait will return with 143, and the trap handler is immediately called. +# We then wait on the pid again, to get the actual status code. +wait $pid +wait $pid +status=$? + +# Show cursor again +printf "$show_cursor" + +# 0 or SIGTERM considered a success. +if [ "$status" -eq 0 ] || [ "$status" -eq 143 ]; then + # Success - print done in green + printf "\r$message ${green}done.${reset}\n" +else + # Failure - clear the line and show dump_fail style output + printf "\r\033[K" + { + echo -e "${red}command failed${reset}: $command (exit: $status)" + echo -e "${blue}--- output ---${reset}" + cat $output + echo -e "${blue}--------------${reset}" + } >&2 +fi + +exit $status diff --git a/ci3/tests/color_prefix_test b/ci3/tests/color_prefix_test new file mode 100755 index 000000000000..42d82eadfcfd --- /dev/null +++ b/ci3/tests/color_prefix_test @@ -0,0 +1,224 @@ +#!/usr/bin/env bash +# Tests for color_prefix script. 
+set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +CP="$SCRIPT_DIR/../color_prefix" + +RED=$'\033[0;31m' +GREEN=$'\033[0;32m' +NC=$'\033[0m' + +TESTS_PASSED=0 +TESTS_FAILED=0 + +pass() { echo "${GREEN}✓${NC} $1"; ((TESTS_PASSED++)) || true; } +fail() { + echo "${RED}✗${NC} $1: $2" + ((TESTS_FAILED++)) || true +} + +# Strip ANSI escape codes for content matching. +strip_ansi() { sed 's/\x1b\[[0-9;]*m//g'; } + +echo "=== Color Prefix Test Suite ===" + +# --- 1. Basic output prefixing --- +echo "--- 1. Basic Output ---" + +output=$("$CP" "myjob" "echo hello" 2>&1) +plain=$(echo "$output" | strip_ansi) +if [[ "$plain" == "[myjob] hello" ]]; then + pass "single line prefixed" +else + fail "single line prefixed" "got: $plain" +fi + +# --- 2. Multiple lines --- +output=$("$CP" "multi" "echo line1; echo line2; echo line3" 2>&1) +count=$(echo "$output" | wc -l) +if [[ "$count" -eq 3 ]]; then + pass "multiple lines all prefixed ($count lines)" +else + fail "multiple lines all prefixed" "expected 3 lines, got $count" +fi + +# Every line should have the prefix. +bad=$(echo "$output" | strip_ansi | grep -cv '^\[multi\]' || true) +if [[ "$bad" -eq 0 ]]; then + pass "every line has prefix" +else + fail "every line has prefix" "$bad lines missing prefix" +fi + +# --- 3. Exit code propagation --- +echo "--- 2. Exit Codes ---" + +code=0 +"$CP" "ok" "exit 0" &>/dev/null || code=$? +if [[ "$code" -eq 0 ]]; then + pass "exit 0 propagated" +else + fail "exit 0 propagated" "got $code" +fi + +code=0 +"$CP" "fail1" "exit 1" &>/dev/null || code=$? +if [[ "$code" -eq 1 ]]; then + pass "exit 1 propagated" +else + fail "exit 1 propagated" "got $code" +fi + +code=0 +"$CP" "fail42" "exit 42" &>/dev/null || code=$? +if [[ "$code" -eq 42 ]]; then + pass "exit 42 propagated" +else + fail "exit 42 propagated" "got $code" +fi + +# --- 4. stderr is captured and prefixed --- +echo "--- 3. 
Stderr Handling ---" + +output=$("$CP" "err" "echo on-stdout; echo on-stderr >&2" 2>&1) +plain=$(echo "$output" | strip_ansi) +if echo "$plain" | grep -q '\[err\] on-stderr'; then + pass "stderr lines are prefixed" +else + fail "stderr lines are prefixed" "got: $plain" +fi + +# --- 5. No leaked processes --- +echo "--- 4. Process Cleanup ---" + +"$CP" "clean" "echo done" &>/dev/null +sleep 0.3 +leaked_awk=$(pgrep -f "awk.*clean.*fflush" 2>/dev/null || true) +leaked_fifo=$(ls /tmp/tmp.* 2>/dev/null | head -5 || true) +if [[ -z "$leaked_awk" ]]; then + pass "no leaked awk process" +else + fail "no leaked awk process" "pids: $leaked_awk" + kill $leaked_awk 2>/dev/null || true +fi + +# --- 6. No leaked fifo --- +# Run several times and check /tmp doesn't accumulate fifos. +before=$(find /tmp -maxdepth 1 -type p 2>/dev/null | wc -l) +for i in $(seq 1 5); do + "$CP" "fifo$i" "echo hi" &>/dev/null +done +after=$(find /tmp -maxdepth 1 -type p 2>/dev/null | wc -l) +if [[ "$after" -le "$before" ]]; then + pass "no leaked fifos after 5 runs" +else + fail "no leaked fifos after 5 runs" "before=$before after=$after" +fi + +# --- 7. SIGTERM kills cleanly --- +echo "--- 5. Signal Handling ---" + +# Use setsid to put color_prefix in its own session so SIGTERM doesn't reach us. +setsid "$CP" "sig" "sleep 60" &>/dev/null & +cp_pid=$! +sleep 0.5 +# Send SIGTERM to color_prefix only (not the whole group). +kill -TERM $cp_pid 2>/dev/null +code=0 +wait $cp_pid 2>/dev/null || code=$? +# 130 (from on_signal handler) or 143 (SIGTERM default) +if [[ "$code" -eq 130 ]] || [[ "$code" -eq 143 ]]; then + pass "SIGTERM exits with $code" +else + fail "SIGTERM exits cleanly" "got exit code $code" +fi + +# Ensure the sleep child was also killed. +sleep 0.5 +if ! pgrep -f "sleep 60" &>/dev/null; then + pass "child process killed on SIGTERM" +else + fail "child process killed on SIGTERM" "sleep 60 still running" + pkill -f "sleep 60" 2>/dev/null || true +fi + +# --- 8. 
Background children don't block exit --- +echo "--- 6. Fd Leak Prevention ---" + +start=$SECONDS +code=0 +timeout 10 "$CP" "bg" "echo before; sleep 999 /dev/null 2>&1 & disown; echo after" &>/dev/null || code=$? +elapsed=$((SECONDS - start)) + +if [[ "$elapsed" -lt 8 ]]; then + pass "exits promptly despite background child (${elapsed}s)" +else + fail "exits promptly despite background child" "took ${elapsed}s" +fi +pkill -f "sleep 999" 2>/dev/null || true + +# --- 9. Large output is fully flushed --- +echo "--- 7. Output Flushing ---" + +output=$("$CP" "flush" "seq 1 500" 2>&1) +count=$(echo "$output" | wc -l) +if [[ "$count" -eq 500 ]]; then + pass "all 500 lines flushed" +else + fail "all 500 lines flushed" "got $count lines" +fi + +# Verify last line is correct. +last=$(echo "$output" | tail -1 | strip_ansi) +if [[ "$last" == "[flush] 500" ]]; then + pass "last line is correct" +else + fail "last line is correct" "got: $last" +fi + +# --- 10. Special characters in output --- +echo "--- 8. Special Characters ---" + +output=$("$CP" "special" "echo 'hello \"world\" \$PATH \\backslash'" 2>&1) +plain=$(echo "$output" | strip_ansi) +if echo "$plain" | grep -q 'hello "world"'; then + pass "quotes preserved" +else + fail "quotes preserved" "got: $plain" +fi + +# --- 11. Empty output --- +echo "--- 9. Edge Cases ---" + +output=$("$CP" "empty" "true" 2>&1) +if [[ -z "$output" ]]; then + pass "no output for silent command" +else + fail "no output for silent command" "got: $output" +fi + +# --- 12. Rapid successive runs --- +pids=() +for i in $(seq 1 5); do + "$CP" "rapid$i" "echo run$i" &>/dev/null & + pids+=($!) 
+done +all_ok=1 +for pid in "${pids[@]}"; do + wait $pid 2>/dev/null || all_ok=0 +done +if [[ "$all_ok" -eq 1 ]]; then + pass "5 concurrent runs all succeed" +else + fail "5 concurrent runs all succeed" "some exited non-zero" +fi + +echo +if [[ $TESTS_FAILED -eq 0 ]]; then + echo "${GREEN}All $TESTS_PASSED tests passed.${NC}" + exit 0 +else + echo "${RED}$TESTS_FAILED of $((TESTS_PASSED + TESTS_FAILED)) tests failed.${NC}" + exit 1 +fi diff --git a/ci3/tests/denoise_test b/ci3/tests/denoise_test new file mode 100755 index 000000000000..4b42e1fa8431 --- /dev/null +++ b/ci3/tests/denoise_test @@ -0,0 +1,144 @@ +#!/usr/bin/env bash +# Tests for denoise script, specifically: +# - tail -f is killed (not just the dots loop) +# - dots loop drains fully before denoise exits +# - denoise exits cleanly even when the command spawns background children +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)" + +RED=$'\033[0;31m' +GREEN=$'\033[0;32m' +NC=$'\033[0m' + +TESTS_PASSED=0 +TESTS_FAILED=0 + +pass() { echo "${GREEN}✓${NC} $1"; ((TESTS_PASSED++)) || true; } +fail() { + echo "${RED}✗${NC} $1: $2" + ((TESTS_FAILED++)) || true +} + +echo "=== Denoise Test Suite ===" + +# Common env for denoise: enable it, disable redis, set narrow dot width for fast tests. +DENOISE_ENV=( + DENOISE=1 + CI_REDIS_AVAILABLE=0 + DENOISE_WIDTH=4 + DUMP_FAIL=0 + FORCE_COLOR=0 + PATH="$PATH" + HOME="$HOME" +) + +# --- Test 1: denoise exits and no tail -f is left behind --- +echo "--- Test: no leaked tail -f after denoise exits ---" + +# Run denoise with a simple command that produces a few lines. +code=0 +env "${DENOISE_ENV[@]}" "$ROOT/ci3/denoise" 'for i in $(seq 1 10); do echo line$i; done' &>/dev/null || code=$? + +# Check no tail -f is left from our denoise run. +# Give a moment for any orphans to appear. 
+sleep 0.5 +leftover=$(pgrep -f "tail --sleep-interval=0.2.*-f /tmp/" 2>/dev/null || true) +if [ -z "$leftover" ]; then + pass "no leaked tail -f processes" +else + fail "no leaked tail -f processes" "found tail pids: $leftover" + kill $leftover 2>/dev/null || true +fi + +if [ "$code" -eq 0 ]; then + pass "denoise exits 0 on success" +else + fail "denoise exits 0 on success" "exit code $code" +fi + +# --- Test 2: tail -f is killed, not just the dots loop --- +echo "--- Test: tail -f is killed directly ---" + +# Run denoise and capture its PID so we can inspect children. +env "${DENOISE_ENV[@]}" "$ROOT/ci3/denoise" \ + 'for i in $(seq 1 20); do echo line$i; sleep 0.05; done' &>/dev/null & +denoise_pid=$! + +# Wait for tail to appear as a child. +tail_found=0 +for i in $(seq 1 30); do + if pgrep -P $denoise_pid -f "tail --sleep-interval" &>/dev/null; then + tail_found=1 + break + fi + sleep 0.1 +done + +# Wait for denoise to finish. +wait $denoise_pid 2>/dev/null || true + +# After denoise exits, no tail should remain. +sleep 0.3 +leftover_tail=$(pgrep -f "tail --sleep-interval=0.2.*-f /tmp/" 2>/dev/null || true) + +if [ "$tail_found" -eq 1 ]; then + pass "tail -f was started during denoise" +else + fail "tail -f was started during denoise" "never saw tail process" +fi + +if [ -z "$leftover_tail" ]; then + pass "tail -f was cleaned up after denoise exit" +else + fail "tail -f was cleaned up after denoise exit" "found tail pids: $leftover_tail" + kill $leftover_tail 2>/dev/null || true +fi + +# --- Test 3: denoise propagates non-zero exit code --- +echo "--- Test: denoise propagates failure exit code ---" + +code=0 +env "${DENOISE_ENV[@]}" "$ROOT/ci3/denoise" 'echo failing; exit 42' &>/dev/null || code=$? 
+ +if [ "$code" -eq 42 ]; then + pass "denoise propagates exit code 42" +else + fail "denoise propagates exit code 42" "got exit code $code" +fi + +# --- Test 4: dots are flushed (output contains dot progress) --- +echo "--- Test: dot progress appears in output ---" + +output=$(env "${DENOISE_ENV[@]}" "$ROOT/ci3/denoise" \ + 'for i in $(seq 1 10); do echo line$i; sleep 0.05; done' 2>&1) + +if echo "$output" | grep -q '\.'; then + pass "dot progress appears in output" +else + fail "dot progress appears in output" "no dots found in: $output" +fi + +# --- Test 5: denoise exits promptly (not stuck waiting on pipe) --- +echo "--- Test: denoise exits within timeout ---" + +start=$SECONDS +code=0 +timeout 10 env "${DENOISE_ENV[@]}" "$ROOT/ci3/denoise" 'echo fast' &>/dev/null || code=$? +elapsed=$((SECONDS - start)) + +if [ "$code" -eq 0 ] && [ "$elapsed" -lt 8 ]; then + pass "denoise exits promptly (${elapsed}s)" +else + fail "denoise exits promptly" "code=$code elapsed=${elapsed}s" +fi + +echo +if [[ $TESTS_FAILED -eq 0 ]]; then + echo "${GREEN}All $TESTS_PASSED tests passed.${NC}" + exit 0 +else + echo "${RED}$TESTS_FAILED of $((TESTS_PASSED + TESTS_FAILED)) tests failed.${NC}" + exit 1 +fi diff --git a/ci3/tests/redact_test b/ci3/tests/redact_test index 3f5faf29b038..c5280c6246dc 100755 --- a/ci3/tests/redact_test +++ b/ci3/tests/redact_test @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Comprehensive Redaction Test Suite set -euo pipefail diff --git a/ci3/watch_ci b/ci3/watch_ci index 73622c762508..643cdb722e8a 100755 --- a/ci3/watch_ci +++ b/ci3/watch_ci @@ -1,6 +1,5 @@ #!/usr/bin/env bash source $(git rev-parse --show-toplevel)/ci3/source -source $ci3/source_color source $HOME/py-env/bin/activate # Parse arguments diff --git a/docs/scripts/aztecjs_reference_generation/generate_docs.sh b/docs/scripts/aztecjs_reference_generation/generate_docs.sh index 3e43731f24f1..f75a77366e1f 100755 --- a/docs/scripts/aztecjs_reference_generation/generate_docs.sh +++ 
b/docs/scripts/aztecjs_reference_generation/generate_docs.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Convenience script to generate Aztec.js API documentation in one step # Usage: ./scripts/aztecjs_reference_generation/generate_docs.sh [output_dir] [--validate] diff --git a/docs/scripts/aztecjs_reference_generation/update_docs.sh b/docs/scripts/aztecjs_reference_generation/update_docs.sh index ac17af6ce7d6..b574a97938ef 100755 --- a/docs/scripts/aztecjs_reference_generation/update_docs.sh +++ b/docs/scripts/aztecjs_reference_generation/update_docs.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Script to regenerate auto-generated Aztec.js API documentation # Usage: ./scripts/aztecjs_reference_generation/update_docs.sh [target_version] # diff --git a/docs/scripts/cleanup_nightly_versions.sh b/docs/scripts/cleanup_nightly_versions.sh index eb9e8fdb330c..ca8815a8fc39 100755 --- a/docs/scripts/cleanup_nightly_versions.sh +++ b/docs/scripts/cleanup_nightly_versions.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Script to clean up nightly documentation versions # This removes all versions containing "nightly" from the Build docs diff --git a/docs/scripts/cli_reference_generation/generate_cli_docs.sh b/docs/scripts/cli_reference_generation/generate_cli_docs.sh index c2598009cda0..1b2c97e9ee59 100755 --- a/docs/scripts/cli_reference_generation/generate_cli_docs.sh +++ b/docs/scripts/cli_reference_generation/generate_cli_docs.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Script to generate auto-generated CLI documentation # Usage: ./scripts/cli_reference_generation/generate_cli_docs.sh [OPTIONS] [target_version] [output_dir] # diff --git a/docs/scripts/get_current_version.sh b/docs/scripts/get_current_version.sh index 2a06d207c2e0..78005fe7f897 100755 --- a/docs/scripts/get_current_version.sh +++ b/docs/scripts/get_current_version.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script fetches the current version from 
versioned_docs directory # Returns the version number (e.g. "v0.85.0") diff --git a/docs/scripts/update_docs_versions.sh b/docs/scripts/update_docs_versions.sh index 972286a2810a..098f1904cf39 100755 --- a/docs/scripts/update_docs_versions.sh +++ b/docs/scripts/update_docs_versions.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script updates versions.json for a specific docs instance with the latest versions. # It automatically detects if nightly versions exist and includes them appropriately. diff --git a/noir-projects/noir-contracts/bootstrap.sh b/noir-projects/noir-contracts/bootstrap.sh index 74fd5ce1cc91..9e472f0f4d16 100755 --- a/noir-projects/noir-contracts/bootstrap.sh +++ b/noir-projects/noir-contracts/bootstrap.sh @@ -191,8 +191,17 @@ function compile { # .[0] is the original json (at $json_path) # .[1] is the updated functions on stdin (-) # * merges their fields. - jq -c '.functions[]' $json_path | \ - parallel $PARALLEL_FLAGS --keep-order -N1 --block 8M --pipe process_function $contract_hash | \ + # Write each function to a separate temp file to avoid pipe/stdin issues with large JSON + local func_dir=$(mktemp -d -p $tmp_dir) + local i=0 + while IFS= read -r func_json; do + echo "$func_json" > "$func_dir/$i.json" + ((i++)) || true + done < <(jq -c '.functions[]' $json_path) + + # Process each function file in parallel + ls "$func_dir"/*.json | sort -V | \ + parallel $PARALLEL_FLAGS --keep-order 'cat {} | process_function '"$contract_hash" | \ jq -s '{functions: .}' | jq -s '.[0] * {functions: .[1].functions}' $json_path - > $tmp_dir/$filename mv $tmp_dir/$filename $json_path } diff --git a/noir-projects/noir-protocol-circuits/scripts/prettify_abi_json.sh b/noir-projects/noir-protocol-circuits/scripts/prettify_abi_json.sh index 4e404fb180bf..8d72919541fe 100755 --- a/noir-projects/noir-protocol-circuits/scripts/prettify_abi_json.sh +++ b/noir-projects/noir-protocol-circuits/scripts/prettify_abi_json.sh @@ -1,4 +1,4 @@ -#!/bin/bash 
+#!/usr/bin/env bash # VSCode is meant to be able to format json for me, but it's not doing it, so here's a script (which admittedly is longer than the underlying `jq` command, but I'm forgetful). diff --git a/release-image/bootstrap.sh b/release-image/bootstrap.sh index 65bbbcec3ba3..ad415d70c477 100755 --- a/release-image/bootstrap.sh +++ b/release-image/bootstrap.sh @@ -72,6 +72,11 @@ export -f build_image function build { echo_header "release-image build" + if ! command -v docker &>/dev/null; then + echo "Docker is required to build the release image. Skipping." + exit 0 + fi + if ! cache_download release-image-base-$hash.zst; then denoise "cd .. && docker build -f release-image/Dockerfile.base -t aztecprotocol/release-image-base ." docker save aztecprotocol/release-image-base:latest > release-image-base @@ -88,6 +93,10 @@ function build { } function test_cmds { + if ! command -v docker &>/dev/null; then + exit 0 + fi + # Very simple sanity test. echo "$hash docker run --rm aztecprotocol/aztec --version" } diff --git a/scripts/merge-train/auto-merge.sh b/scripts/merge-train/auto-merge.sh index 00549eca9f7b..4f76afcecd8d 100755 --- a/scripts/merge-train/auto-merge.sh +++ b/scripts/merge-train/auto-merge.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail diff --git a/scripts/merge-train/merge-next.sh b/scripts/merge-train/merge-next.sh index 18bb64467828..69359e75e2da 100755 --- a/scripts/merge-train/merge-next.sh +++ b/scripts/merge-train/merge-next.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail diff --git a/scripts/merge-train/squash-pr.sh b/scripts/merge-train/squash-pr.sh index 65e883c446c0..3c8151f88e21 100755 --- a/scripts/merge-train/squash-pr.sh +++ b/scripts/merge-train/squash-pr.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -xeuo pipefail @@ -93,10 +93,10 @@ git commit -m "$commit_message" --no-verify if [[ "$is_fork" == "true" ]]; then # It's a fork - need to push to the fork repository echo 
"Detected fork: pushing to $head_repo" - + # Add the fork as a remote (assumes GITHUB_TOKEN env var is set from workflow) git remote add fork "https://x-access-token:${GITHUB_TOKEN}@github.com/${head_repo}.git" - + # Push to the fork git push --force fork "HEAD:refs/heads/$branch" else diff --git a/scripts/merge-train/update-pr-body.sh b/scripts/merge-train/update-pr-body.sh index ae6a2d4fd95a..6b780f9a84cc 100755 --- a/scripts/merge-train/update-pr-body.sh +++ b/scripts/merge-train/update-pr-body.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail diff --git a/spartan/eth-devnet/entrypoints/eth-execution-nethermind.sh b/spartan/eth-devnet/entrypoints/eth-execution-nethermind.sh index 03a72eb6876b..bfba3bfc913a 100644 --- a/spartan/eth-devnet/entrypoints/eth-execution-nethermind.sh +++ b/spartan/eth-devnet/entrypoints/eth-execution-nethermind.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash cat /genesis/jwt-secret.hex diff --git a/spartan/metrics/irm-monitor/scripts/build-and-publish.sh b/spartan/metrics/irm-monitor/scripts/build-and-publish.sh index b01f945ca343..fd072eaa8bd0 100755 --- a/spartan/metrics/irm-monitor/scripts/build-and-publish.sh +++ b/spartan/metrics/irm-monitor/scripts/build-and-publish.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail diff --git a/spartan/metrics/irm-monitor/scripts/silence-alerts.sh b/spartan/metrics/irm-monitor/scripts/silence-alerts.sh index d299e8629a50..a8178b06449e 100755 --- a/spartan/metrics/irm-monitor/scripts/silence-alerts.sh +++ b/spartan/metrics/irm-monitor/scripts/silence-alerts.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail diff --git a/spartan/metrics/irm-monitor/scripts/update-monitoring.sh b/spartan/metrics/irm-monitor/scripts/update-monitoring.sh index 69e860041bf4..1926953bb76e 100755 --- a/spartan/metrics/irm-monitor/scripts/update-monitoring.sh +++ b/spartan/metrics/irm-monitor/scripts/update-monitoring.sh @@ -1,4 +1,4 @@ -#!/bin/bash 
+#!/usr/bin/env bash set -e diff --git a/spartan/scripts/check_env_vars.sh b/spartan/scripts/check_env_vars.sh index f4f92e9b6f10..ec6d4e84ba12 100755 --- a/spartan/scripts/check_env_vars.sh +++ b/spartan/scripts/check_env_vars.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Script to check that all environment variables used in Helm templates, values files, and Terraform scripts # are defined in yarn-project/foundation/src/config/env_var.ts diff --git a/spartan/scripts/cleanup_helm.sh b/spartan/scripts/cleanup_helm.sh index c89291005a2b..f3d149cbf79e 100755 --- a/spartan/scripts/cleanup_helm.sh +++ b/spartan/scripts/cleanup_helm.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Script to manually clean up stuck Helm releases # Usage: ./cleanup_helm.sh [release-name] [namespace] diff --git a/spartan/scripts/deploy_chaos_mesh.sh b/spartan/scripts/deploy_chaos_mesh.sh index a5ca828111ac..25b5b10ee374 100755 --- a/spartan/scripts/deploy_chaos_mesh.sh +++ b/spartan/scripts/deploy_chaos_mesh.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail @@ -40,7 +40,7 @@ fi DESTROY_CHAOS_MESH=${DESTROY_CHAOS_MESH:-false} CREATE_CHAOS_MESH=${CREATE_CHAOS_MESH:-true} -if [[ -z "${ENABLE_SAFE_MODE:-}" ]]; then +if [[ -z "${ENABLE_SAFE_MODE:-}" ]]; then if [[ "$CLUSTER" == "kind" ]]; then ENABLE_SAFE_MODE="false" else diff --git a/spartan/scripts/deploy_network.sh b/spartan/scripts/deploy_network.sh index dc40910b6de2..981b0738fba4 100755 --- a/spartan/scripts/deploy_network.sh +++ b/spartan/scripts/deploy_network.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail # Resolve repo root and script directory for reliable relative paths diff --git a/spartan/scripts/setup_gcp_secrets.sh b/spartan/scripts/setup_gcp_secrets.sh index 362544669e36..9eadb39ecdc1 100755 --- a/spartan/scripts/setup_gcp_secrets.sh +++ b/spartan/scripts/setup_gcp_secrets.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail diff --git 
a/spartan/terraform/purge_local_state.sh b/spartan/terraform/purge_local_state.sh index 2bc521b54aa5..2e910240682b 100755 --- a/spartan/terraform/purge_local_state.sh +++ b/spartan/terraform/purge_local_state.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # make sure we're in the directory where the script is located cd "$(dirname "$0")" diff --git a/yarn-project/end-to-end/.gitignore b/yarn-project/end-to-end/.gitignore index 2e6bd96ac9de..0d793c08af67 100644 --- a/yarn-project/end-to-end/.gitignore +++ b/yarn-project/end-to-end/.gitignore @@ -5,5 +5,5 @@ example-app-ivc-inputs-out dumped-avm-circuit-inputs ultrahonk-bench-inputs web/main.js* -consensys_web3signer_25.11.0 +consensys_web3signer_* scripts/ha/postgres_data/ diff --git a/yarn-project/simulator/scripts/run_avm_brilling_fuzz.sh b/yarn-project/simulator/scripts/run_avm_brilling_fuzz.sh index 65b62ed3dec4..9c2f879a6c6e 100755 --- a/yarn-project/simulator/scripts/run_avm_brilling_fuzz.sh +++ b/yarn-project/simulator/scripts/run_avm_brilling_fuzz.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e # Colors for output