Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions barretenberg/acir_tests/bbjs-test/.yarnrc.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
nodeLinker: node-modules
15 changes: 15 additions & 0 deletions barretenberg/acir_tests/bbjs-test/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
{
"name": "bbjs-test",
"packageManager": "yarn@4.5.2",
"main": "dest/index.js",
"scripts": {
"build": "tsc"
},
"dependencies": {
"@aztec/bb.js": "portal:../../ts",
"commander": "^12.1.0"
},
"devDependencies": {
"typescript": "^5.6.3"
}
}
85 changes: 85 additions & 0 deletions barretenberg/acir_tests/bbjs-test/src/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
import createDebug from "debug";
import fs from "fs/promises";
import path from "path";
import { Command } from "commander";
import assert from "assert";

// Enable every debug namespace unconditionally: this tool runs inside CI test
// flows where the logs are the only signal, so output must always be emitted.
createDebug.enable("*");
const debug = createDebug("bbjs-test");

// Locations of the artifacts exchanged between `prove` and `verify`:
// each helper maps an output directory to the file holding that artifact.
const artifactFile = (name: string) => (dir: string) => path.join(dir, name);
const proofPath = artifactFile("proof");
const publicInputsPath = artifactFile("public-inputs");
const vkeyPath = artifactFile("vk");

/**
 * Prove a compiled ACIR program with the bb.js UltraHonkBackend and write the
 * proof, public inputs (as JSON) and verification key into `outputDirectory`.
 *
 * - `oracleHash`: pass "keccak" to use the keccak oracle hash (needed for
 *   solidity verification); any other value uses the backend default.
 * - `multiThreaded`: when true the backend runs with 8 threads, otherwise 1.
 */
async function generateProof({
  bytecodePath,
  witnessPath,
  outputDirectory,
  oracleHash,
  multiThreaded,
}: {
  bytecodePath: string;
  witnessPath: string;
  outputDirectory: string;
  oracleHash?: string;
  multiThreaded?: boolean;
}) {
  // Imported lazily so the backend module is only loaded when proving.
  const { UltraHonkBackend } = await import("@aztec/bb.js");

  debug(`Generating proof for ${bytecodePath}...`);
  const artifactJson = await fs.readFile(bytecodePath);
  const { bytecode } = JSON.parse(artifactJson.toString());
  const threadCount = multiThreaded ? 8 : 1;
  const backend = new UltraHonkBackend(bytecode, { threads: threadCount });

  // The same oracle-hash option is used for proving and for VK derivation.
  const hashOptions = { keccak: oracleHash === "keccak" };

  const witnessBuffer = await fs.readFile(witnessPath);
  const result = await backend.generateProof(new Uint8Array(witnessBuffer), hashOptions);

  await fs.writeFile(proofPath(outputDirectory), Buffer.from(result.proof));
  debug(`Proof written to ${proofPath(outputDirectory)}`);

  await fs.writeFile(publicInputsPath(outputDirectory), JSON.stringify(result.publicInputs));
  debug(`Public inputs written to ${publicInputsPath(outputDirectory)}`);

  const verificationKey = await backend.getVerificationKey(hashOptions);
  await fs.writeFile(vkeyPath(outputDirectory), Buffer.from(verificationKey));
  debug(`Verification key written to ${vkeyPath(outputDirectory)}`);

  await backend.destroy();
}

/**
 * Verify a proof previously written by `generateProof`, reading the proof,
 * public inputs and verification key back from `directory`.
 * Logs the verification result rather than returning it.
 */
async function verifyProof({ directory }: { directory: string }) {
  // Imported lazily, mirroring generateProof.
  const { BarretenbergVerifier } = await import("@aztec/bb.js");

  const verifier = new BarretenbergVerifier();

  // Read back the three artifacts produced by the `prove` command.
  const proofBuffer = await fs.readFile(proofPath(directory));
  const publicInputs = JSON.parse(await fs.readFile(publicInputsPath(directory), "utf8"));
  const vkBuffer = await fs.readFile(vkeyPath(directory));

  const verified = await verifier.verifyUltraHonkProof(
    { proof: new Uint8Array(proofBuffer), publicInputs },
    new Uint8Array(vkBuffer)
  );

  await verifier.destroy();
  debug(`Proof verified: ${verified}`);
}

// Prepare a minimal command line interface.
// Called by the acir_tests flow scripts, which pass the long-form flags
// (e.g. `--oracle-hash keccak`, `--multi-threaded`).
const program = new Command();

program
  .command("prove")
  .option("-b, --bytecode-path <path>", "bytecode path")
  .option("-w, --witness-path <path>", "witness path")
  .option("-o, --output-directory <path>", "output directory")
  // No short flag here: `-h` would collide with commander's built-in `-h, --help`.
  .option("--oracle-hash <hash>", "oracle hash")
  // Long flags require a double dash; the previous `-multi-threaded` (single
  // dash) would never match the `--multi-threaded` flag passed by the flows.
  .option("-m, --multi-threaded", "multi-threaded")
  .action((args) => generateProof(args));

program
  .command("verify")
  .option("-d, --directory <path>", "directory")
  .action((args) => verifyProof(args));

program.parse(process.argv);
19 changes: 19 additions & 0 deletions barretenberg/acir_tests/bbjs-test/tsconfig.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
{
"compilerOptions": {
"target": "es2020",
"lib": ["dom", "esnext", "es2017.object"],
"module": "NodeNext",
"strict": true,
"declaration": true,
"allowSyntheticDefaultImports": true,
"esModuleInterop": true,
"downlevelIteration": true,
"inlineSourceMap": true,
"declarationMap": true,
"importHelpers": true,
"resolveJsonModule": true,
"outDir": "dest",
"rootDir": "src"
},
"include": ["src"]
}
18 changes: 18 additions & 0 deletions barretenberg/acir_tests/bootstrap.sh
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,8 @@ function build {
# find {headless-test,browser-test-app} -exec touch -t 197001010000 {} + 2>/dev/null || true

denoise "cd browser-test-app && yarn build"

denoise "cd bbjs-test && yarn build"
}

function test {
Expand Down Expand Up @@ -187,6 +189,22 @@ function test_cmds_internal {
echo SYS=ultra_honk FLOW=prove_then_verify RECURSIVE=true $run_test double_verify_honk_proof
echo SYS=ultra_honk FLOW=prove_then_verify HASH=keccak $run_test assert_statement
echo SYS=ultra_honk FLOW=prove_then_verify ROLLUP=true $run_test verify_rollup_honk_proof

# prove and verify using bb.js classes
echo SYS=ultra_honk FLOW=bbjs_prove_verify $run_test 1_mul
echo SYS=ultra_honk FLOW=bbjs_prove_verify THREAD_MODEL=mt $run_test assert_statement

# prove with bb.js and verify with solidity verifier
echo SYS=ultra_honk FLOW=bbjs_prove_sol_verify $run_test 1_mul
echo SYS=ultra_honk FLOW=bbjs_prove_sol_verify $run_test assert_statement

# prove with bb cli and verify with bb.js classes
echo SYS=ultra_honk FLOW=bb_prove_bbjs_verify $run_test 1_mul
echo SYS=ultra_honk FLOW=bb_prove_bbjs_verify $run_test assert_statement

# prove with bb.js and verify with bb cli
echo SYS=ultra_honk FLOW=bbjs_prove_bb_verify $run_test 1_mul
echo SYS=ultra_honk FLOW=bbjs_prove_bb_verify $run_test assert_statement
}

function ultra_honk_wasm_memory {
Expand Down
49 changes: 49 additions & 0 deletions barretenberg/acir_tests/flows/bb_prove_bbjs_verify.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
#!/bin/bash

# prove with the bb cli and verify using bb.js classes
set -eu

if [ "${SYS:-}" != "ultra_honk" ]; then
echo "Error: This flow only supports ultra_honk"
exit 1
fi

artifact_dir=$(realpath ./target)
output_dir=$artifact_dir/bbjs-bb-tmp
mkdir -p $output_dir

# Cleanup on exit
trap "rm -rf $output_dir" EXIT

# Generate the proof and VK using BB CLI (save as both bytes and fields;
# the fields output is used below to count the public inputs)
$BIN prove \
--scheme ultra_honk \
-b $artifact_dir/program.json \
-w $artifact_dir/witness.gz \
--output_format bytes_and_fields \
-o $output_dir

# Generate the VK using BB CLI
$BIN write_vk \
--scheme ultra_honk \
-b $artifact_dir/program.json \
-o $output_dir

# bb.js expects proof and public inputs to be separate files, so we need to split them
# this will not be needed after #11024

# Save public inputs as separate file (first NUM_PUBLIC_INPUTS fields of proof_fields.json).
# UH_PROOF_FIELDS_LENGTH is taken to be the fixed field count of an UltraHonk
# proof without public inputs, so any excess fields must be public inputs.
PROOF_FIELDS_LENGTH=$(jq 'length' $output_dir/proof_fields.json)
UH_PROOF_FIELDS_LENGTH=440
NUM_PUBLIC_INPUTS=$((PROOF_FIELDS_LENGTH - UH_PROOF_FIELDS_LENGTH))
jq ".[:$NUM_PUBLIC_INPUTS]" $output_dir/proof_fields.json > $output_dir/public-inputs

# Remove NUM_PUBLIC_INPUTS*32 bytes from the proof:
# keep the first 4 bytes (8 hex chars — presumably a length prefix; sol-test
# makes the same assumption), skip the public-input fields (32 bytes = 64 hex
# chars each), and keep the remainder.
proof_hex=$(cat $output_dir/proof | xxd -p)
proof_start=${proof_hex:0:8}
proof_end=${proof_hex:$((8 + NUM_PUBLIC_INPUTS * 64))}
echo -n $proof_start$proof_end | xxd -r -p > $output_dir/proof

# Verify the proof with bb.js classes
node ../../bbjs-test verify \
-d $output_dir
47 changes: 47 additions & 0 deletions barretenberg/acir_tests/flows/bbjs_prove_bb_verify.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
#!/bin/bash

# prove with bb.js and verify using bb cli
set -eu

if [ "${SYS:-}" != "ultra_honk" ]; then
echo "Error: This flow only supports ultra_honk"
exit 1
fi

artifact_dir=$(realpath ./target)
output_dir=$artifact_dir/bb-bbjs-tmp
mkdir -p $output_dir

# Cleanup on exit
trap "rm -rf $output_dir" EXIT

# Write the proof, public inputs and VK into $output_dir using bb.js
node ../../bbjs-test prove \
-b $artifact_dir/program.json \
-w $artifact_dir/witness.gz \
-o $output_dir

# Join the proof and public inputs into a single file (the bb CLI expects
# them combined); this will not be needed after #11024.
# Layout assumed: the first 4 bytes (8 hex chars) of the proof are a prefix
# that stays in front, and the public inputs are inserted right after it.

proof_bytes=$(cat $output_dir/proof | xxd -p)
public_inputs=$(cat $output_dir/public-inputs | jq -r '.[]')
proof_start=${proof_bytes:0:8}
proof_end=${proof_bytes:8}

# Concatenate all public-input field values into one hex string.
# NOTE(review): assumes each entry is bare hex without a "0x" prefix — a
# stray "0x" would corrupt the xxd -r -p decoding below; confirm against the
# JSON written by bbjs-test prove.
public_inputs_bytes=""
for input in $public_inputs; do
public_inputs_bytes+=$input
done

# Combine proof start, public inputs, and rest of proof
echo -n $proof_start$public_inputs_bytes$proof_end | xxd -r -p > $output_dir/proof

# Print the length of the proof file in bytes (debugging aid for CI logs)
ls -l $output_dir/proof | awk '{print $5}'

# Verify the proof with bb cli
$BIN verify \
--scheme ultra_honk \
-k $output_dir/vk \
-p $output_dir/proof
39 changes: 39 additions & 0 deletions barretenberg/acir_tests/flows/bbjs_prove_sol_verify.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
#!/bin/bash

# prove using bb.js and verify using solidity verifier
# (runs the generated verifier through the sol-test harness)
set -eu

if [ "${SYS:-}" != "ultra_honk" ]; then
echo "Error: This flow only supports ultra_honk"
exit 1
fi

artifact_dir=$(realpath ./target)
output_dir=$artifact_dir/bbjs-sol-tmp
mkdir -p $output_dir

# Cleanup on exit
trap "rm -rf $output_dir" EXIT

# Generate the proof and VK with bb.js; the keccak oracle hash is used for
# solidity verification.
node ../../bbjs-test prove \
-b $artifact_dir/program.json \
-w $artifact_dir/witness.gz \
-o $output_dir \
--oracle-hash keccak

# Write the solidity verifier to ./target
export VK=$output_dir/vk
export VERIFIER_PATH="$output_dir/Verifier.sol"

# Use the BB CLI to write the solidity verifier - this can also be done with bb.js
$BIN write_solidity_verifier --scheme ultra_honk -k $VK -o $VERIFIER_PATH

# Verify the proof using the solidity verifier.
# sol-test reads all of its inputs from these environment variables.
export PROOF=$output_dir/proof
export PUBLIC_INPUTS=$output_dir/public-inputs
export TEST_PATH=$(realpath "../../sol-test/HonkTest.sol")
export TESTING_HONK="true"
export TEST_NAME=$(basename $(realpath ./))

node ../../sol-test/src/index.js
27 changes: 27 additions & 0 deletions barretenberg/acir_tests/flows/bbjs_prove_verify.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
#!/bin/bash

# prove and verify using bb.js classes
set -eu

if [ "${SYS:-}" != "ultra_honk" ]; then
echo "Error: This flow only supports ultra_honk"
exit 1
fi

artifact_dir=$(realpath ./target)
output_dir=$artifact_dir/bbjs-tmp
mkdir -p $output_dir

# Cleanup on exit
trap "rm -rf $output_dir" EXIT

# Pass --multi-threaded only when THREAD_MODEL=mt (defaults to single-threaded).
# The previous inline `${THREAD_MODEL:-st} = "mt" && echo ...` was a line
# continuation of the node command, so it passed stray positional arguments
# and never actually forwarded the flag.
multi_threaded_flag=""
if [ "${THREAD_MODEL:-st}" = "mt" ]; then
multi_threaded_flag="--multi-threaded"
fi

# Writes the proof, public inputs and VK into $output_dir
node ../../bbjs-test prove \
-b $artifact_dir/program.json \
-w $artifact_dir/witness.gz \
-o $output_dir \
$multi_threaded_flag

# Verify the proof by reading the files back from $output_dir
node ../../bbjs-test verify \
-d $output_dir
3 changes: 2 additions & 1 deletion barretenberg/acir_tests/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
"workspaces": [
"browser-test-app",
"headless-test",
"sol-test"
"sol-test",
"bbjs-test"
]
}
47 changes: 33 additions & 14 deletions barretenberg/acir_tests/sol-test/src/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -217,24 +217,43 @@ const killAnvil = () => {
};

try {
const proofAsFieldsPath = getEnvVar("PROOF_AS_FIELDS");
const proofAsFields = readFileSync(proofAsFieldsPath);
const [numPublicInputs, publicInputs] = readPublicInputs(
JSON.parse(proofAsFields.toString())
);

const proofPath = getEnvVar("PROOF");
const proof = readFileSync(proofPath);
let publicInputsPath;
try {
publicInputsPath = getEnvVar("PUBLIC_INPUTS");
} catch (e) {
// noop
}

let proofStr = '';
let publicInputs = [];

// If "path to public inputs" is provided, it means that the proof and public inputs are saved as separate files
// A bit hacky, but this can go away once BB CLI saves them as separate files - #11024
if (publicInputsPath) {
const proof = readFileSync(proofPath);
proofStr = proof.toString("hex");
publicInputs = JSON.parse(readFileSync(publicInputsPath).toString()); // assumes JSON array of PI hex strings
} else {
// Proof and public inputs are saved in a single file; we need to extract the PI from the proof
const proof = readFileSync(proofPath);
proofStr = proof.toString("hex");

const proofAsFieldsPath = getEnvVar("PROOF_AS_FIELDS");
const proofAsFields = readFileSync(proofAsFieldsPath);

let numPublicInputs;
[numPublicInputs, publicInputs] = readPublicInputs(
JSON.parse(proofAsFields.toString())
);

// Cut the number of public inputs out of the proof string
let proofStr = proof.toString("hex");
proofStr = proofStr.substring(32 * 2 * numPublicInputs); // Remove the publicInput bytes from the proof
}

// Honk proof have field length as the first 4 bytes
// This should go away in the future
if (testingHonk) {
// Cut off the serialised buffer size at start
proofStr = proofStr.substring(8);
// Get the part after the public inputs
proofStr = proofStr.substring(64 * numPublicInputs);
} else {
proofStr = proofStr.substring(64 * numPublicInputs);
}

proofStr = "0x" + proofStr;
Expand Down
Loading