diff --git a/MIGRATION_DESIGN.md b/MIGRATION_DESIGN.md new file mode 100644 index 000000000000..7e5bbb0d675b --- /dev/null +++ b/MIGRATION_DESIGN.md @@ -0,0 +1,448 @@ +# BB CLI → BB.js Msgpack Native Backend Migration + +## Executive Summary + +**Current State**: BBNativeRollupProver spawns bb CLI processes and uses file-based I/O for all proof operations +**Target State**: Direct bb.js msgpack API calls with in-memory buffer passing - **ZERO file I/O for proving** + +**Impact**: +- Eliminate 196-280 file I/O operations per block +- Eliminate 28+ process spawns per block +- Expected 5-10× speedup for I/O-bound proving workloads + +--- + +## Current Architecture Analysis + +### File I/O Per Proof (Current) + +For **each proof** (×28 for full epoch): + +1. **Witness Generation (ACVM)**: + - Write: `partial-witness.gz` + - ACVM spawns, reads witness, generates partial witness + - Read: `partial-witness.gz` + +2. **Proof Generation (BB CLI)**: + - Write: `{circuit}-bytecode` (from artifact) + - Write: `{circuit}-vk` (from ServerCircuitVks) + - Spawn `bb prove` process + - BB reads: bytecode, VK, witness + - BB writes: `proof`, `public_inputs` + - Read: `proof`, `public_inputs` + +3. **Verification (BB CLI)**: + - Write: `proof`, `public_inputs`, `vk` + - Spawn `bb verify` process + - BB reads all three files + - Output: exit code + +4. **Cleanup**: + - Delete 5-7 temporary files + +**Total per proof**: ~7-10 file I/O operations, 2 process spawns + +--- + +## New Architecture Design + +### Core Principles + +1. **Singleton API Instance**: Create Barretenberg instance once, reuse for all operations +2. **Buffer-Based Flow**: Pass Uint8Array buffers, never write/read files +3. **In-Memory VK Cache**: VKs already available via ServerCircuitVks - no file I/O needed +4. **Keep ACVM Separate**: ACVM witness generation remains file-based (different binary) +5. **Eliminate execute.ts Layer**: Call bb.js msgpack API directly from BBProver + +### New Proof Flow + +```typescript +// 1. Initialize once (constructor) +private bbApi: Barretenberg; + +async init() { + this.bbApi = await Barretenberg.new({ + threads: this.config.bbThreads || 1, + bbPath: this.config.bbBinaryPath // Point to native bb binary + }); + // bb.js spawns: bb msgpack run --input + // Backend stays alive, accepts msgpack commands +} + +// 2. Per proof (no files!) +async generateProofWithBBMsgpack(input, circuitType, ...) { + // ACVM still needs partial witness file (unavoidable - different binary) + const witnessBuffer = await fs.readFile(outputWitnessFile); + + // Get bytecode from artifact (already in memory) + const artifact = getServerCircuitArtifact(circuitType); + const bytecode = Buffer.from(artifact.bytecode, 'base64'); + + // Get VK from cache (already in memory) + const vkData = this.getVerificationKeyDataForCircuit(circuitType); + const vk = vkData.keyAsBytes; + + // Generate proof via msgpack API - ALL IN MEMORY! + const { proof, publicInputs } = await this.bbApi.circuitProve({ + witness: witnessBuffer, + circuit: { + name: circuitType, + bytecode, + verificationKey: vk, // Provide VK = faster proving + }, + settings: getProofSettingsFromFlavor( + getUltraHonkFlavorForCircuit(circuitType) + ), + }); + + // proof and publicInputs are Uint8Array[] (arrays of 32-byte field elements) + // Convert to Aztec proof format + return convertMsgpackProofToRecursiveProof(proof, publicInputs, vkData, proofLength); +} + +// 3. Verification (no files!) 
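+// Conceptually, steps 2 and 3 both rely on a proof <-> field-array conversion. A minimal
+// sketch of that conversion is shown below (illustrative only: the helper name and the
+// assumption of packed 32-byte fields with public inputs first are not part of the API).
+function splitProofBuffer(buf: Buffer, numPublicInputs: number) {
+  const fields: Uint8Array[] = [];
+  for (let i = 0; i < buf.length; i += 32) {
+    fields.push(new Uint8Array(buf.subarray(i, i + 32))); // copy out each 32-byte field element
+  }
+  return {
+    publicInputFields: fields.slice(0, numPublicInputs), // first N fields are the public inputs
+    proofFields: fields.slice(numPublicInputs),          // remainder is the proof itself
+  };
+}
+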
+async verifyWithBBMsgpack(proof: Proof, vkData: VerificationKeyData, flavor: UltraHonkFlavor) { + // Convert Proof object to msgpack format + const { proofFields, publicInputFields } = convertProofToMsgpackFormat(proof); + + // Verify via msgpack API - ALL IN MEMORY! + const { verified } = await this.bbApi.circuitVerify({ + verificationKey: vkData.keyAsBytes, + publicInputs: publicInputFields, // Uint8Array[] + proof: proofFields, // Uint8Array[] + settings: getProofSettingsFromFlavor(flavor), + }); + + if (!verified) { + throw new ProvingError('Proof verification failed'); + } +} + +// 4. Cleanup (destructor) +async destroy() { + await this.bbApi?.destroy(); +} +``` + +--- + +## Detailed Migration Plan + +### Phase 1: Infrastructure Setup + +**Files to Create:** + +1. **`yarn-project/bb-prover/src/bb/msgpack_api.ts`** + ```typescript + /** + * Wrapper around bb.js msgpack API for Aztec protocol circuit proving. + * Handles buffer conversions and proof format translations. + */ + export class BBMsgpackProver { + constructor(private api: Barretenberg) {} + + async proveCircuit(...): Promise> { ... } + async verifyCircuit(...): Promise { ... } + + // Helper: Convert Aztec Proof ↔ Msgpack format + private toMsgpackProof(proof: Proof): { proof: Uint8Array[], publicInputs: Uint8Array[] } + private fromMsgpackProof(proof: Uint8Array[], publicInputs: Uint8Array[], ...): RecursiveProof + } + ``` + +2. **Update `yarn-project/bb-prover/src/config.ts`** + ```typescript + export interface BBConfig { + bbBinaryPath: string; + bbWorkingDirectory: string; // Still needed for ACVM witness temp files + bbThreads?: number; // NEW: thread count for bb.js + bbSkipCleanup: boolean; + } + ``` + +### Phase 2: Refactor BBNativeRollupProver + +**File: `yarn-project/bb-prover/src/prover/server/bb_prover.ts`** + +**Changes:** + +1. **Add bb.js API instance**: + ```typescript + export class BBNativeRollupProver implements ServerCircuitProver { + private bbApi!: Barretenberg; + private bbMsgpackProver!: BBMsgpackProver; + + static async new(config: BBProverConfig, telemetry: TelemetryClient) { + const prover = new BBNativeRollupProver(config, telemetry); + + // Initialize bb.js native backend + prover.bbApi = await Barretenberg.new({ + threads: config.bbThreads || 1, + bbPath: config.bbBinaryPath, + }); + + prover.bbMsgpackProver = new BBMsgpackProver(prover.bbApi); + + return prover; + } + } + ``` + +2. **Replace `generateProofWithBB`**: + ```typescript + private async generateProofWithBBMsgpack( + input: Input, + circuitType: ServerProtocolArtifact, + convertInput: (input: Input) => WitnessMap, + convertOutput: (outputWitness: WitnessMap) => Output, + workingDirectory: string, + ): Promise<{ circuitOutput: Output; proof: RecursiveProof }> { + // Still use ACVM for witness generation (different binary) + const outputWitnessFile = path.join(workingDirectory, 'partial-witness.gz'); + const simulator = new NativeACVMSimulator(...); + const witnessResult = await simulator.executeProtocolCircuit(...); + const output = convertOutput(witnessResult.witness); + + // Read witness buffer + const witnessBuffer = await fs.readFile(outputWitnessFile); + + // Get circuit data (in-memory) + const artifact = getServerCircuitArtifact(circuitType); + const vkData = this.getVerificationKeyDataForCircuit(circuitType); + + // Prove via msgpack - NO FILE I/O! 
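+    // (proofLength is assumed to be the expected recursive proof length for this circuit
+    // type; the exact constant/helper used to obtain it is left out of this sketch.)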
+ const proof = await this.bbMsgpackProver.proveCircuit( + witnessBuffer, + Buffer.from(artifact.bytecode, 'base64'), + vkData.keyAsBytes, + circuitType, + proofLength, + ); + + return { circuitOutput: output, proof }; + } + ``` + +3. **Replace `verifyWithKeyInternal`**: + ```typescript + private async verifyWithKeyMsgpack( + proof: Proof, + verificationKey: { keyAsBytes: Buffer }, + flavor: UltraHonkFlavor, + ) { + // Verify via msgpack - NO FILE I/O! + await this.bbMsgpackProver.verifyCircuit( + proof, + verificationKey.keyAsBytes, + flavor, + ); + + logger.info(`Successfully verified proof via msgpack API`); + } + ``` + +4. **Update `createRecursiveProof`** to call new methods: + ```typescript + private async createRecursiveProof<...>(...): Promise<{...}> { + const operation = async (bbWorkingDirectory: string) => { + // Use new msgpack method + const { proof, circuitOutput } = await this.generateProofWithBBMsgpack( + input, + circuitType, + convertInput, + convertOutput, + bbWorkingDirectory, + ); + + // No more readProofsFromOutputDirectory! + return { circuitOutput, proof }; + }; + + return await this.runInDirectory(operation); + } + ``` + +5. **Add cleanup**: + ```typescript + async destroy() { + await this.bbApi?.destroy(); + } + ``` + +### Phase 3: AVM Circuit Migration + +**File: `yarn-project/bb-prover/src/bb/execute.ts`** + +**Add msgpack version of AVM proving:** + +```typescript +export async function generateAvmProofMsgpack( + api: Barretenberg, + input: AvmCircuitInputs, + log: Logger, +): Promise<{ proof: Uint8Array[], vk: Uint8Array }> { + // Serialize inputs + const inputsBuffer = input.serializeWithMessagePack(); + + // Call AVM-specific msgpack command + const result = await api.avmProve({ + inputs: inputsBuffer, + }); + + return { + proof: result.proof, + vk: result.verificationKey, + }; +} + +export async function verifyAvmProofMsgpack( + api: Barretenberg, + proof: Uint8Array[], + publicInputs: AvmCircuitPublicInputs, + vk: Uint8Array, + log: Logger, +): Promise { + const { verified } = await api.avmVerify({ + proof, + publicInputs: publicInputs.serializeWithMessagePack(), + verificationKey: vk, + }); + + if (!verified) { + throw new Error('AVM proof verification failed'); + } +} +``` + +### Phase 4: Deprecation & Cleanup + +**After migration is complete and tested:** + +1. **Mark old methods as deprecated**: + ```typescript + /** @deprecated Use generateProofWithBBMsgpack instead */ + private async generateProofWithBB(...) { ... } + ``` + +2. **Remove file-based execute.ts functions**: + - Keep only msgpack versions + - Remove `executeBB`, `generateProof`, `verifyProof`, etc. + +3. **Simplify directory management**: + - `bbWorkingDirectory` only needed for ACVM witness temp files + - Fewer temp directories created + +--- + +## Testing Strategy + +### Unit Tests + +1. **Test proof format conversion**: + - Aztec Proof ↔ Msgpack Uint8Array[] conversion + - Field element packing/unpacking + +2. **Test msgpack API wrapper**: + - Mock Barretenberg API + - Verify correct parameters passed + +### Integration Tests + +1. **Single circuit proving**: + - Generate proof with msgpack API + - Verify with msgpack API + - Compare proof output with CLI version + +2. **Full rollup flow**: + - Run complete base → merge → root flow + - Verify all proofs validate + - Check orchestrator integration + +### Performance Tests + +1. 
**Benchmark file I/O elimination**: + - Measure proof generation time: CLI vs msgpack + - Measure full block proving: before vs after + - Expected: 5-10× speedup + +2. **Memory usage**: + - Monitor memory with single API instance + - Check for leaks in long-running tests + +--- + +## Migration Checklist + +- [ ] Create `msgpack_api.ts` wrapper +- [ ] Update `BBConfig` with new options +- [ ] Add Barretenberg instance to BBNativeRollupProver +- [ ] Implement `generateProofWithBBMsgpack` +- [ ] Implement `verifyWithKeyMsgpack` +- [ ] Refactor `createRecursiveProof` to use msgpack +- [ ] Refactor `verifyProof` to use msgpack +- [ ] Migrate AVM proving to msgpack +- [ ] Migrate AVM verification to msgpack +- [ ] Update BBNativePrivateKernelProver if needed +- [ ] Update BBVerifier if needed +- [ ] Add unit tests for buffer conversions +- [ ] Add integration tests for proof flow +- [ ] Run full e2e tests +- [ ] Benchmark performance improvements +- [ ] Deprecate old file-based methods +- [ ] Remove execute.ts CLI functions +- [ ] Update documentation + +--- + +## Benefits + +### Performance +- **Eliminate 196-280 file I/O operations** per epoch +- **Eliminate 28+ process spawns** per epoch +- **5-10× faster** proving for I/O-bound workloads +- **Reduced disk pressure** on proving infrastructure + +### Architecture +- **Cleaner separation**: bb.js handles all BB operations +- **Persistent backend**: One long-running bb process vs 28+ short-lived spawns +- **Easier debugging**: In-process communication, better error messages +- **Better resource management**: Thread pool reuse, memory efficiency + +### Maintainability +- **Fewer moving parts**: No file coordination, no temp directory cleanup +- **Type safety**: TypeScript types for all proof structures +- **Testability**: Mock bb.js API instead of mocking fs/process +- **Future-proof**: Native backend is the strategic direction + +--- + +## Risks & Mitigations + +### Risk: bb.js msgpack API bugs +**Mitigation**: +- Run both old and new paths in parallel initially +- Extensive integration testing +- Gradual rollout with feature flag + +### Risk: Memory leaks with persistent backend +**Mitigation**: +- Proper cleanup in `destroy()` +- Memory profiling in long-running tests +- Monitor production memory usage + +### Risk: ACVM still requires file I/O +**Mitigation**: +- Accept this limitation (different binary) +- Future: migrate ACVM to in-memory API when available +- Witness I/O is only 1-2 ops vs 7-10 total + +--- + +## Timeline Estimate + +1. **Phase 1 (Infrastructure)**: 1-2 days +2. **Phase 2 (BBProver Refactor)**: 2-3 days +3. **Phase 3 (AVM Migration)**: 1-2 days +4. **Phase 4 (Testing & Cleanup)**: 2-3 days + +**Total**: ~6-10 days for complete migration diff --git a/TESTING.md b/TESTING.md new file mode 100644 index 000000000000..6a69523a01c4 --- /dev/null +++ b/TESTING.md @@ -0,0 +1,159 @@ +# Testing the BB CLI → Msgpack Migration + +## Unit Tests Created + +### `yarn-project/bb-prover/src/bb/msgpack_api.test.ts` + +Comprehensive unit tests for the msgpack buffer conversion logic that enables in-memory proof operations. + +**Test Coverage:** + +1. **`toMsgpackProof` format conversion** + - Verifies correct splitting of Aztec `Proof` buffer into: + - Public inputs (first N * 32 bytes) + - Proof fields (remaining bytes) + - Validates 32-byte field alignment + - Tests array chunking logic + +2. 
**`fromMsgpackProof` format conversion** + - Verifies reconstruction of Aztec `RecursiveProof` from field arrays + - Tests Buffer → Fr conversions + - Validates proof length matching + +3. **Round-trip conversion** + - Ensures data preservation through full to/from cycle + - Verifies buffer equality after conversion + - Tests with various proof sizes + +**What These Tests Validate:** +- Core buffer manipulation correctness +- Msgpack format compatibility +- No data loss during conversions +- Proper field element handling + +## Running Tests + +### Prerequisites + +Install dependencies from git root: + +```bash +cd $(git rev-parse --show-toplevel) +./bootstrap.sh +``` + +This will: +- Install all monorepo dependencies +- Link portal packages (noir, bb.js, etc.) +- Build required dependencies + +### Running Unit Tests + +```bash +cd yarn-project/bb-prover +yarn test msgpack_api.test.ts +``` + +### Running All BB-Prover Tests + +```bash +cd yarn-project/bb-prover +yarn test +``` + +**Note:** The bb-prover package includes integration tests (`avm_proving_tests/`) that: +- Require the full BB binary and ACVM +- Take significant time to run +- Test end-to-end proof generation + +## Test Strategy + +### Unit Tests (Fast) +- **File:** `msgpack_api.test.ts` +- **Runtime:** < 1 second +- **Coverage:** Buffer conversion logic +- **Dependencies:** Minimal (Fr, Proof classes only) + +### Integration Tests (Slow) +- **Files:** `avm_proving_tests/*.test.ts` +- **Runtime:** Minutes per test +- **Coverage:** Full proof generation pipeline +- **Dependencies:** BB binary, ACVM, full circuits + +## Validation Without Dependencies + +You can verify TypeScript correctness without installing dependencies: + +```bash +cd yarn-project/bb-prover +npx tsc --noEmit src/bb/msgpack_api.test.ts +``` + +Expected: Only dependency resolution errors (missing node_modules), no syntax errors. + +## What to Test After Migration + +1. **Unit Tests** + - ✅ Buffer conversion logic (msgpack_api.test.ts) + - Suggested: Add tests for error cases (invalid proof lengths, etc.) + +2. **Integration Tests** + - All existing AVM proving tests should pass unchanged + - Tests use public API (`BBNativeRollupProver`) which hasn't changed + - Internal implementation (CLI → msgpack) is transparent to tests + +3. **Performance Tests** + - Benchmark proof generation time: CLI vs msgpack + - Expected: 5-10× speedup for I/O-bound operations + - Measure file I/O count reduction + +## Migration Impact on Tests + +**No test changes required** because: +- `BBNativeRollupProver` interface unchanged +- All public methods work identically +- Only internal implementation changed (file I/O → msgpack) + +Existing tests automatically validate the msgpack implementation. + +## Test Execution Log + +Once dependencies are installed, running the unit tests should show: + +``` +PASS src/bb/msgpack_api.test.ts + BBMsgpackProver buffer conversions + toMsgpackProof format + ✓ should split proof into public inputs and proof fields correctly + fromMsgpackProof format + ✓ should reconstruct proof from field arrays correctly + ✓ should convert field buffers to Fr array correctly + round-trip conversion + ✓ should preserve proof data through to/from msgpack conversion + +Test Suites: 1 passed, 1 total +Tests: 4 passed, 4 total +``` + +## Recommended Test Workflow + +1. **First:** Run unit tests to validate buffer logic + ```bash + yarn test msgpack_api.test.ts + ``` + +2. 
**Then:** Run a single AVM integration test + ```bash + yarn test avm_minimal_proving.test.ts + ``` + +3. **Finally:** Run full test suite + ```bash + yarn test + ``` + +## CI/CD Considerations + +- Unit tests should run on every commit (fast feedback) +- Integration tests can run on PR/merge (slower but comprehensive) +- Consider adding performance regression tests for I/O metrics diff --git a/barretenberg/acir_tests/bootstrap.sh b/barretenberg/acir_tests/bootstrap.sh index 3cef6a018360..1bf53619e954 100755 --- a/barretenberg/acir_tests/bootstrap.sh +++ b/barretenberg/acir_tests/bootstrap.sh @@ -208,9 +208,11 @@ function test_cmds { } function bench_cmds { - local dir=$(realpath --relative-to=$root .) - echo "$tests_hash:CPUS=16 barretenberg/acir_tests/scripts/run_bench.sh ultra_honk_rec_wasm_memory" \ - "'scripts/bbjs_legacy_cli_prove.sh verify_honk_proof'" + return + # TODO: We no longer have a bb.js cli. Recreate this benchmark another way? + # local dir=$(realpath --relative-to=$root .) + # echo "$tests_hash:CPUS=16 barretenberg/acir_tests/scripts/run_bench.sh ultra_honk_rec_wasm_memory" \ + # "'scripts/bbjs_legacy_cli_prove.sh verify_honk_proof'" } # TODO(https://github.com/AztecProtocol/barretenberg/issues/1254): More complete testing, including failure tests diff --git a/barretenberg/bootstrap.sh b/barretenberg/bootstrap.sh index ac48877abdb6..bc5f9c841ac2 100755 --- a/barretenberg/bootstrap.sh +++ b/barretenberg/bootstrap.sh @@ -36,4 +36,3 @@ case "$cmd" in exit 1 ;; esac - diff --git a/barretenberg/cpp/bootstrap.sh b/barretenberg/cpp/bootstrap.sh index 60d4d0b23e76..f4d9c3dc61f9 100755 --- a/barretenberg/cpp/bootstrap.sh +++ b/barretenberg/cpp/bootstrap.sh @@ -244,12 +244,12 @@ function build { builds+=(build_gcc_syntax_check_only build_fuzzing_syntax_check_only build_asan_fast) fi if [ "$(arch)" == "amd64" ] && [ "$CI_FULL" -eq 1 ]; then - builds+=(build_smt_verification) + builds+=(build_darwin_arm64 build_smt_verification) fi if semver check "$REF_NAME" && [[ "$(arch)" == "amd64" ]]; then # macOS builds require the avm-transpiler linked. # We build them using zig cross-compilation. 
- builds+=(build_darwin_arm64 build_darwin_amd64) + builds+=(build_darwin_amd64) fi parallel --line-buffered --tag --halt now,fail=1 denoise {} ::: ${builds[@]} build_release diff --git a/barretenberg/cpp/src/barretenberg/api/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/api/CMakeLists.txt index 363208fef964..54fec8912cc0 100644 --- a/barretenberg/cpp/src/barretenberg/api/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/api/CMakeLists.txt @@ -4,3 +4,7 @@ barretenberg_module(api client_ivc bbapi dsl libdeflate::libdeflate_static nlohm if(AVM_TRANSPILER_LIB) target_link_libraries(api_objects PRIVATE avm_transpiler) endif() + +if(NOT WASM) + target_link_libraries(api_objects PRIVATE ipc) +endif() diff --git a/barretenberg/cpp/src/barretenberg/bbapi/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/bbapi/CMakeLists.txt index 412521c1fa25..9786eec2998d 100644 --- a/barretenberg/cpp/src/barretenberg/bbapi/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/bbapi/CMakeLists.txt @@ -1 +1 @@ -barretenberg_module(bbapi common client_ivc dsl) +barretenberg_module(bbapi common client_ivc dsl crypto_poseidon2 crypto_pedersen_commitment crypto_pedersen_hash crypto_blake2s crypto_aes128 crypto_schnorr crypto_ecdsa ecc srs) diff --git a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_crypto.cpp b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_crypto.cpp index 1b5748838cee..3d036f144887 100644 --- a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_crypto.cpp +++ b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_crypto.cpp @@ -1,12 +1,97 @@ -#include "bbapi_crypto.hpp" +/** + * @file bbapi_crypto.cpp + * @brief Implementation of cryptographic command execution for the Barretenberg RPC API + */ +#include "barretenberg/bbapi/bbapi_crypto.hpp" +#include "barretenberg/common/throw_or_abort.hpp" +#include "barretenberg/crypto/aes128/aes128.hpp" +#include "barretenberg/crypto/blake2s/blake2s.hpp" +#include "barretenberg/crypto/pedersen_commitment/pedersen.hpp" +#include "barretenberg/crypto/pedersen_hash/pedersen.hpp" #include "barretenberg/crypto/poseidon2/poseidon2.hpp" -#include "barretenberg/ecc/curves/bn254/fr.hpp" +#include "barretenberg/crypto/poseidon2/poseidon2_permutation.hpp" namespace bb::bbapi { -Poseidon2Hash::Response Poseidon2Hash::execute(BBApiRequest& request) && +Poseidon2Hash::Response Poseidon2Hash::execute(BB_UNUSED BBApiRequest& request) && { - (void)request; return { crypto::Poseidon2::hash(inputs) }; } + +Poseidon2Permutation::Response Poseidon2Permutation::execute(BB_UNUSED BBApiRequest& request) && +{ + using Permutation = crypto::Poseidon2Permutation; + + // inputs is already std::array, direct use + return { Permutation::permutation(inputs) }; +} + +Poseidon2HashAccumulate::Response Poseidon2HashAccumulate::execute(BB_UNUSED BBApiRequest& request) && +{ + if (inputs.empty()) { + throw_or_abort("Poseidon2HashAccumulate requires at least one input"); + } + + fr result = inputs[0]; + for (size_t i = 1; i < inputs.size(); ++i) { + result = crypto::Poseidon2::hash({ inputs[i], result }); + } + + return { result }; +} + +PedersenCommit::Response PedersenCommit::execute(BB_UNUSED BBApiRequest& request) && +{ + crypto::GeneratorContext ctx; + ctx.offset = static_cast(hash_index); + return { crypto::pedersen_commitment::commit_native(inputs, ctx) }; +} + +PedersenHash::Response PedersenHash::execute(BB_UNUSED BBApiRequest& request) && +{ + crypto::GeneratorContext ctx; + ctx.offset = static_cast(hash_index); + return { crypto::pedersen_hash::hash(inputs, ctx) }; +} + 
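+// As in PedersenCommit/PedersenHash above, `hash_index` is applied as the generator
+// context offset, selecting the generator range used for the hash below.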
+PedersenHashBuffer::Response PedersenHashBuffer::execute(BB_UNUSED BBApiRequest& request) && +{ + crypto::GeneratorContext ctx; + ctx.offset = static_cast(hash_index); + return { crypto::pedersen_hash::hash_buffer(input, ctx) }; +} + +Blake2s::Response Blake2s::execute(BB_UNUSED BBApiRequest& request) && +{ + return { crypto::blake2s(data) }; +} + +Blake2sToField::Response Blake2sToField::execute(BB_UNUSED BBApiRequest& request) && +{ + auto hash_result = crypto::blake2s(data); + return { fr::serialize_from_buffer(hash_result.data()) }; +} + +AesEncrypt::Response AesEncrypt::execute(BB_UNUSED BBApiRequest& request) && +{ + // Copy plaintext as AES encrypts in-place + std::vector result = plaintext; + result.resize(length); + + crypto::aes128_encrypt_buffer_cbc(result.data(), iv.data(), key.data(), length); + + return { std::move(result) }; +} + +AesDecrypt::Response AesDecrypt::execute(BB_UNUSED BBApiRequest& request) && +{ + // Copy ciphertext as AES decrypts in-place + std::vector result = ciphertext; + result.resize(length); + + crypto::aes128_decrypt_buffer_cbc(result.data(), iv.data(), key.data(), length); + + return { std::move(result) }; +} + } // namespace bb::bbapi diff --git a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_crypto.hpp b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_crypto.hpp index a557dbe4dd31..29d00c090dcc 100644 --- a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_crypto.hpp +++ b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_crypto.hpp @@ -1,14 +1,17 @@ #pragma once /** * @file bbapi_crypto.hpp - * @brief Cryptographic primitive commands for the Barretenberg RPC API. + * @brief Cryptographic primitives command definitions for the Barretenberg RPC API. * - * This file contains command structures for direct access to cryptographic - * primitives like hash functions. + * This file contains command structures for cryptographic operations including + * Poseidon2, Pedersen, Blake2s, and AES. 
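+ * Each command is a msgpack-serializable struct paired with a nested Response struct.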
*/ #include "barretenberg/bbapi/bbapi_shared.hpp" #include "barretenberg/common/named_union.hpp" +#include "barretenberg/ecc/curves/bn254/fr.hpp" +#include "barretenberg/ecc/curves/grumpkin/grumpkin.hpp" #include "barretenberg/serialize/msgpack.hpp" +#include #include #include @@ -34,4 +37,193 @@ struct Poseidon2Hash { bool operator==(const Poseidon2Hash&) const = default; }; +/** + * @struct Poseidon2Permutation + * @brief Compute Poseidon2 permutation on state (4 field elements) + */ +struct Poseidon2Permutation { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "Poseidon2Permutation"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "Poseidon2PermutationResponse"; + std::array outputs; + MSGPACK_FIELDS(outputs); + bool operator==(const Response&) const = default; + }; + + std::array inputs; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(inputs); + bool operator==(const Poseidon2Permutation&) const = default; +}; + +/** + * @struct Poseidon2HashAccumulate + * @brief Compute accumulated Poseidon2 hash + */ +struct Poseidon2HashAccumulate { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "Poseidon2HashAccumulate"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "Poseidon2HashAccumulateResponse"; + fr hash; + MSGPACK_FIELDS(hash); + bool operator==(const Response&) const = default; + }; + + std::vector inputs; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(inputs); + bool operator==(const Poseidon2HashAccumulate&) const = default; +}; + +/** + * @struct PedersenCommit + * @brief Compute Pedersen commitment to field elements + */ +struct PedersenCommit { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "PedersenCommit"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "PedersenCommitResponse"; + grumpkin::g1::affine_element point; + MSGPACK_FIELDS(point); + bool operator==(const Response&) const = default; + }; + + std::vector inputs; + uint32_t hash_index; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(inputs, hash_index); + bool operator==(const PedersenCommit&) const = default; +}; + +/** + * @struct PedersenHash + * @brief Compute Pedersen hash of field elements + */ +struct PedersenHash { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "PedersenHash"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "PedersenHashResponse"; + grumpkin::fq hash; + MSGPACK_FIELDS(hash); + bool operator==(const Response&) const = default; + }; + + std::vector inputs; + uint32_t hash_index; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(inputs, hash_index); + bool operator==(const PedersenHash&) const = default; +}; + +/** + * @struct PedersenHashBuffer + * @brief Compute Pedersen hash of raw buffer + */ +struct PedersenHashBuffer { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "PedersenHashBuffer"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "PedersenHashBufferResponse"; + grumpkin::fq hash; + MSGPACK_FIELDS(hash); + bool operator==(const Response&) const = default; + }; + + std::vector input; + uint32_t hash_index; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(input, hash_index); + bool operator==(const PedersenHashBuffer&) const = default; +}; + +/** + * @struct Blake2s + * @brief Compute Blake2s hash + */ +struct Blake2s { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "Blake2s"; + + struct Response { + static constexpr const char 
MSGPACK_SCHEMA_NAME[] = "Blake2sResponse"; + std::array hash; + MSGPACK_FIELDS(hash); + bool operator==(const Response&) const = default; + }; + + std::vector data; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(data); + bool operator==(const Blake2s&) const = default; +}; + +/** + * @struct Blake2sToField + * @brief Compute Blake2s hash and convert to field element + */ +struct Blake2sToField { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "Blake2sToField"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "Blake2sToFieldResponse"; + fr field; + MSGPACK_FIELDS(field); + bool operator==(const Response&) const = default; + }; + + std::vector data; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(data); + bool operator==(const Blake2sToField&) const = default; +}; + +/** + * @struct AesEncrypt + * @brief AES-128 CBC encryption + */ +struct AesEncrypt { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "AesEncrypt"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "AesEncryptResponse"; + std::vector ciphertext; + MSGPACK_FIELDS(ciphertext); + bool operator==(const Response&) const = default; + }; + + std::vector plaintext; + std::array iv; + std::array key; + uint32_t length; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(plaintext, iv, key, length); + bool operator==(const AesEncrypt&) const = default; +}; + +/** + * @struct AesDecrypt + * @brief AES-128 CBC decryption + */ +struct AesDecrypt { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "AesDecrypt"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "AesDecryptResponse"; + std::vector plaintext; + MSGPACK_FIELDS(plaintext); + bool operator==(const Response&) const = default; + }; + + std::vector ciphertext; + std::array iv; + std::array key; + uint32_t length; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(ciphertext, iv, key, length); + bool operator==(const AesDecrypt&) const = default; +}; + } // namespace bb::bbapi diff --git a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ecc.cpp b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ecc.cpp new file mode 100644 index 000000000000..7b767ce56dcf --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ecc.cpp @@ -0,0 +1,62 @@ +/** + * @file bbapi_ecc.cpp + * @brief Implementation of elliptic curve command execution for the Barretenberg RPC API + */ +#include "barretenberg/bbapi/bbapi_ecc.hpp" + +namespace bb::bbapi { + +GrumpkinMul::Response GrumpkinMul::execute(BB_UNUSED BBApiRequest& request) && +{ + return { point * scalar }; +} + +GrumpkinAdd::Response GrumpkinAdd::execute(BB_UNUSED BBApiRequest& request) && +{ + return { point_a + point_b }; +} + +GrumpkinBatchMul::Response GrumpkinBatchMul::execute(BB_UNUSED BBApiRequest& request) && +{ + auto output = grumpkin::g1::element::batch_mul_with_endomorphism(points, scalar); + return { std::move(output) }; +} + +GrumpkinGetRandomFr::Response GrumpkinGetRandomFr::execute(BB_UNUSED BBApiRequest& request) && +{ + return { bb::fr::random_element() }; +} + +GrumpkinReduce512::Response GrumpkinReduce512::execute(BB_UNUSED BBApiRequest& request) && +{ + auto bigint_input = from_buffer(input.data()); + uint512_t barretenberg_modulus(bb::fr::modulus); + uint512_t target_output = bigint_input % barretenberg_modulus; + return { bb::fr(target_output.lo) }; +} + +Secp256k1Mul::Response Secp256k1Mul::execute(BB_UNUSED BBApiRequest& request) && +{ + return { point * scalar }; +} + 
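+// Like their Grumpkin counterparts above, the secp256k1 commands below either sample a
+// field element from the library's default random engine (GetRandomFr) or reduce a
+// caller-supplied 512-bit value into the scalar field (Reduce512).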
+Secp256k1GetRandomFr::Response Secp256k1GetRandomFr::execute(BB_UNUSED BBApiRequest& request) && +{ + return { secp256k1::fr::random_element() }; +} + +Secp256k1Reduce512::Response Secp256k1Reduce512::execute(BB_UNUSED BBApiRequest& request) && +{ + auto bigint_input = from_buffer(input.data()); + uint512_t secp256k1_modulus(secp256k1::fr::modulus); + uint512_t target_output = bigint_input % secp256k1_modulus; + return { secp256k1::fr(target_output.lo) }; +} + +Bn254FrSqrt::Response Bn254FrSqrt::execute(BB_UNUSED BBApiRequest& request) && +{ + auto [is_sqr, root] = input.sqrt(); + return { is_sqr, root }; +} + +} // namespace bb::bbapi diff --git a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ecc.hpp b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ecc.hpp new file mode 100644 index 000000000000..61ac730aba8b --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ecc.hpp @@ -0,0 +1,208 @@ +#pragma once +/** + * @file bbapi_ecc.hpp + * @brief Elliptic curve operations command definitions for the Barretenberg RPC API. + * + * This file contains command structures for elliptic curve operations including + * Grumpkin, Secp256k1, and BN254 field operations. + */ +#include "barretenberg/bbapi/bbapi_shared.hpp" +#include "barretenberg/common/named_union.hpp" +#include "barretenberg/ecc/curves/bn254/fr.hpp" +#include "barretenberg/ecc/curves/grumpkin/grumpkin.hpp" +#include "barretenberg/ecc/curves/secp256k1/secp256k1.hpp" +#include "barretenberg/serialize/msgpack.hpp" +#include +#include +#include + +namespace bb::bbapi { + +/** + * @struct GrumpkinMul + * @brief Multiply a Grumpkin point by a scalar + */ +struct GrumpkinMul { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "GrumpkinMul"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "GrumpkinMulResponse"; + grumpkin::g1::affine_element point; + MSGPACK_FIELDS(point); + bool operator==(const Response&) const = default; + }; + + grumpkin::g1::affine_element point; + grumpkin::fr scalar; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(point, scalar); + bool operator==(const GrumpkinMul&) const = default; +}; + +/** + * @struct GrumpkinAdd + * @brief Add two Grumpkin points + */ +struct GrumpkinAdd { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "GrumpkinAdd"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "GrumpkinAddResponse"; + grumpkin::g1::affine_element point; + MSGPACK_FIELDS(point); + bool operator==(const Response&) const = default; + }; + + grumpkin::g1::affine_element point_a; + grumpkin::g1::affine_element point_b; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(point_a, point_b); + bool operator==(const GrumpkinAdd&) const = default; +}; + +/** + * @struct GrumpkinBatchMul + * @brief Multiply multiple Grumpkin points by a single scalar + */ +struct GrumpkinBatchMul { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "GrumpkinBatchMul"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "GrumpkinBatchMulResponse"; + std::vector points; + MSGPACK_FIELDS(points); + bool operator==(const Response&) const = default; + }; + + std::vector points; + grumpkin::fr scalar; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(points, scalar); + bool operator==(const GrumpkinBatchMul&) const = default; +}; + +/** + * @struct GrumpkinGetRandomFr + * @brief Get a random Grumpkin field element (BN254 Fr) + */ +struct GrumpkinGetRandomFr { + static constexpr const char MSGPACK_SCHEMA_NAME[] = 
"GrumpkinGetRandomFr"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "GrumpkinGetRandomFrResponse"; + bb::fr value; + MSGPACK_FIELDS(value); + bool operator==(const Response&) const = default; + }; + + // Empty struct for commands with no input - use a dummy field for msgpack + uint8_t dummy = 0; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(dummy); + bool operator==(const GrumpkinGetRandomFr&) const = default; +}; + +/** + * @struct GrumpkinReduce512 + * @brief Reduce a 512-bit value modulo Grumpkin scalar field (BN254 Fr) + */ +struct GrumpkinReduce512 { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "GrumpkinReduce512"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "GrumpkinReduce512Response"; + bb::fr value; + MSGPACK_FIELDS(value); + bool operator==(const Response&) const = default; + }; + + std::array input; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(input); + bool operator==(const GrumpkinReduce512&) const = default; +}; + +/** + * @struct Secp256k1Mul + * @brief Multiply a Secp256k1 point by a scalar + */ +struct Secp256k1Mul { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "Secp256k1Mul"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "Secp256k1MulResponse"; + secp256k1::g1::affine_element point; + MSGPACK_FIELDS(point); + bool operator==(const Response&) const = default; + }; + + secp256k1::g1::affine_element point; + secp256k1::fr scalar; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(point, scalar); + bool operator==(const Secp256k1Mul&) const = default; +}; + +/** + * @struct Secp256k1GetRandomFr + * @brief Get a random Secp256k1 field element + */ +struct Secp256k1GetRandomFr { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "Secp256k1GetRandomFr"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "Secp256k1GetRandomFrResponse"; + secp256k1::fr value; + MSGPACK_FIELDS(value); + bool operator==(const Response&) const = default; + }; + + // Empty struct for commands with no input - use a dummy field for msgpack + uint8_t dummy = 0; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(dummy); + bool operator==(const Secp256k1GetRandomFr&) const = default; +}; + +/** + * @struct Secp256k1Reduce512 + * @brief Reduce a 512-bit value modulo Secp256k1 scalar field + */ +struct Secp256k1Reduce512 { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "Secp256k1Reduce512"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "Secp256k1Reduce512Response"; + secp256k1::fr value; + MSGPACK_FIELDS(value); + bool operator==(const Response&) const = default; + }; + + std::array input; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(input); + bool operator==(const Secp256k1Reduce512&) const = default; +}; + +/** + * @struct Bn254FrSqrt + * @brief Compute square root of a BN254 field element + */ +struct Bn254FrSqrt { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "Bn254FrSqrt"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "Bn254FrSqrtResponse"; + bool is_square_root; + bb::fr value; + MSGPACK_FIELDS(is_square_root, value); + bool operator==(const Response&) const = default; + }; + + bb::fr input; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(input); + bool operator==(const Bn254FrSqrt&) const = default; +}; + +} // namespace bb::bbapi diff --git a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ecdsa.cpp 
b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ecdsa.cpp new file mode 100644 index 000000000000..c664e74d65a5 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ecdsa.cpp @@ -0,0 +1,78 @@ +/** + * @file bbapi_ecdsa.cpp + * @brief Implementation of ECDSA signature command execution for the Barretenberg RPC API + */ +#include "barretenberg/bbapi/bbapi_ecdsa.hpp" +#include "barretenberg/common/throw_or_abort.hpp" + +namespace bb::bbapi { + +// Secp256k1 implementations +EcdsaSecp256k1ComputePublicKey::Response EcdsaSecp256k1ComputePublicKey::execute(BB_UNUSED BBApiRequest& request) && +{ + return { secp256k1::g1::one * private_key }; +} + +EcdsaSecp256k1ConstructSignature::Response EcdsaSecp256k1ConstructSignature::execute(BB_UNUSED BBApiRequest& request) && +{ + auto pub_key = secp256k1::g1::one * private_key; + crypto::ecdsa_key_pair key_pair = { private_key, pub_key }; + + std::string message_str(reinterpret_cast(message.data()), message.size()); + auto sig = crypto::ecdsa_construct_signature( + message_str, key_pair); + + return { sig.r, sig.s, sig.v }; +} + +EcdsaSecp256k1RecoverPublicKey::Response EcdsaSecp256k1RecoverPublicKey::execute(BB_UNUSED BBApiRequest& request) && +{ + crypto::ecdsa_signature sig = { r, s, v }; + std::string message_str(reinterpret_cast(message.data()), message.size()); + return { crypto::ecdsa_recover_public_key( + message_str, sig) }; +} + +EcdsaSecp256k1VerifySignature::Response EcdsaSecp256k1VerifySignature::execute(BB_UNUSED BBApiRequest& request) && +{ + crypto::ecdsa_signature sig = { r, s, v }; + std::string message_str(reinterpret_cast(message.data()), message.size()); + return { crypto::ecdsa_verify_signature( + message_str, public_key, sig) }; +} + +// Secp256r1 implementations +EcdsaSecp256r1ComputePublicKey::Response EcdsaSecp256r1ComputePublicKey::execute(BB_UNUSED BBApiRequest& request) && +{ + return { secp256r1::g1::one * private_key }; +} + +EcdsaSecp256r1ConstructSignature::Response EcdsaSecp256r1ConstructSignature::execute(BB_UNUSED BBApiRequest& request) && +{ + auto pub_key = secp256r1::g1::one * private_key; + crypto::ecdsa_key_pair key_pair = { private_key, pub_key }; + + std::string message_str(reinterpret_cast(message.data()), message.size()); + auto sig = crypto::ecdsa_construct_signature( + message_str, key_pair); + + return { sig.r, sig.s, sig.v }; +} + +EcdsaSecp256r1RecoverPublicKey::Response EcdsaSecp256r1RecoverPublicKey::execute(BB_UNUSED BBApiRequest& request) && +{ + crypto::ecdsa_signature sig = { r, s, v }; + std::string message_str(reinterpret_cast(message.data()), message.size()); + return { crypto::ecdsa_recover_public_key( + message_str, sig) }; +} + +EcdsaSecp256r1VerifySignature::Response EcdsaSecp256r1VerifySignature::execute(BB_UNUSED BBApiRequest& request) && +{ + crypto::ecdsa_signature sig = { r, s, v }; + std::string message_str(reinterpret_cast(message.data()), message.size()); + return { crypto::ecdsa_verify_signature( + message_str, public_key, sig) }; +} + +} // namespace bb::bbapi diff --git a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ecdsa.hpp b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ecdsa.hpp new file mode 100644 index 000000000000..af1bdd0cb0f2 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ecdsa.hpp @@ -0,0 +1,202 @@ +#pragma once +/** + * @file bbapi_ecdsa.hpp + * @brief ECDSA signature command definitions for the Barretenberg RPC API. + * + * This file contains command structures for ECDSA signature operations + * on Secp256k1 and Secp256r1 curves. 
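+ * Signatures are carried as 32-byte r and s components plus a recovery byte v.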
+ */ +#include "barretenberg/bbapi/bbapi_shared.hpp" +#include "barretenberg/common/named_union.hpp" +#include "barretenberg/crypto/ecdsa/ecdsa.hpp" +#include "barretenberg/ecc/curves/secp256k1/secp256k1.hpp" +#include "barretenberg/ecc/curves/secp256r1/secp256r1.hpp" +#include "barretenberg/serialize/msgpack.hpp" +#include +#include +#include +#include + +namespace bb::bbapi { + +/** + * @struct EcdsaSecp256k1ComputePublicKey + * @brief Compute ECDSA public key from private key for secp256k1 + */ +struct EcdsaSecp256k1ComputePublicKey { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "EcdsaSecp256k1ComputePublicKey"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "EcdsaSecp256k1ComputePublicKeyResponse"; + secp256k1::g1::affine_element public_key; + MSGPACK_FIELDS(public_key); + bool operator==(const Response&) const = default; + }; + + secp256k1::fr private_key; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(private_key); + bool operator==(const EcdsaSecp256k1ComputePublicKey&) const = default; +}; + +/** + * @struct EcdsaSecp256r1ComputePublicKey + * @brief Compute ECDSA public key from private key for secp256r1 + */ +struct EcdsaSecp256r1ComputePublicKey { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "EcdsaSecp256r1ComputePublicKey"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "EcdsaSecp256r1ComputePublicKeyResponse"; + secp256r1::g1::affine_element public_key; + MSGPACK_FIELDS(public_key); + bool operator==(const Response&) const = default; + }; + + secp256r1::fr private_key; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(private_key); + bool operator==(const EcdsaSecp256r1ComputePublicKey&) const = default; +}; + +/** + * @struct EcdsaSecp256k1ConstructSignature + * @brief Construct an ECDSA signature for secp256k1 + */ +struct EcdsaSecp256k1ConstructSignature { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "EcdsaSecp256k1ConstructSignature"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "EcdsaSecp256k1ConstructSignatureResponse"; + std::array r; + std::array s; + uint8_t v; + MSGPACK_FIELDS(r, s, v); + bool operator==(const Response&) const = default; + }; + + std::vector message; + secp256k1::fr private_key; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(message, private_key); + bool operator==(const EcdsaSecp256k1ConstructSignature&) const = default; +}; + +/** + * @struct EcdsaSecp256r1ConstructSignature + * @brief Construct an ECDSA signature for secp256r1 + */ +struct EcdsaSecp256r1ConstructSignature { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "EcdsaSecp256r1ConstructSignature"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "EcdsaSecp256r1ConstructSignatureResponse"; + std::array r; + std::array s; + uint8_t v; + MSGPACK_FIELDS(r, s, v); + bool operator==(const Response&) const = default; + }; + + std::vector message; + secp256r1::fr private_key; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(message, private_key); + bool operator==(const EcdsaSecp256r1ConstructSignature&) const = default; +}; + +/** + * @struct EcdsaSecp256k1RecoverPublicKey + * @brief Recover public key from ECDSA signature for secp256k1 + */ +struct EcdsaSecp256k1RecoverPublicKey { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "EcdsaSecp256k1RecoverPublicKey"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "EcdsaSecp256k1RecoverPublicKeyResponse"; + 
secp256k1::g1::affine_element public_key; + MSGPACK_FIELDS(public_key); + bool operator==(const Response&) const = default; + }; + + std::vector message; + std::array r; + std::array s; + uint8_t v; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(message, r, s, v); + bool operator==(const EcdsaSecp256k1RecoverPublicKey&) const = default; +}; + +/** + * @struct EcdsaSecp256r1RecoverPublicKey + * @brief Recover public key from ECDSA signature for secp256r1 + */ +struct EcdsaSecp256r1RecoverPublicKey { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "EcdsaSecp256r1RecoverPublicKey"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "EcdsaSecp256r1RecoverPublicKeyResponse"; + secp256r1::g1::affine_element public_key; + MSGPACK_FIELDS(public_key); + bool operator==(const Response&) const = default; + }; + + std::vector message; + std::array r; + std::array s; + uint8_t v; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(message, r, s, v); + bool operator==(const EcdsaSecp256r1RecoverPublicKey&) const = default; +}; + +/** + * @struct EcdsaSecp256k1VerifySignature + * @brief Verify an ECDSA signature for secp256k1 + */ +struct EcdsaSecp256k1VerifySignature { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "EcdsaSecp256k1VerifySignature"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "EcdsaSecp256k1VerifySignatureResponse"; + bool verified; + MSGPACK_FIELDS(verified); + bool operator==(const Response&) const = default; + }; + + std::vector message; + secp256k1::g1::affine_element public_key; + std::array r; + std::array s; + uint8_t v; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(message, public_key, r, s, v); + bool operator==(const EcdsaSecp256k1VerifySignature&) const = default; +}; + +/** + * @struct EcdsaSecp256r1VerifySignature + * @brief Verify an ECDSA signature for secp256r1 + */ +struct EcdsaSecp256r1VerifySignature { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "EcdsaSecp256r1VerifySignature"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "EcdsaSecp256r1VerifySignatureResponse"; + bool verified; + MSGPACK_FIELDS(verified); + bool operator==(const Response&) const = default; + }; + + std::vector message; + secp256r1::g1::affine_element public_key; + std::array r; + std::array s; + uint8_t v; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(message, public_key, r, s, v); + bool operator==(const EcdsaSecp256r1VerifySignature&) const = default; +}; + +} // namespace bb::bbapi diff --git a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_execute.hpp b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_execute.hpp index 2ae04f6044b5..56ad174dfd41 100644 --- a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_execute.hpp +++ b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_execute.hpp @@ -2,7 +2,11 @@ #include "barretenberg/bbapi/bbapi_client_ivc.hpp" #include "barretenberg/bbapi/bbapi_crypto.hpp" +#include "barretenberg/bbapi/bbapi_ecc.hpp" +#include "barretenberg/bbapi/bbapi_ecdsa.hpp" +#include "barretenberg/bbapi/bbapi_schnorr.hpp" #include "barretenberg/bbapi/bbapi_shared.hpp" +#include "barretenberg/bbapi/bbapi_srs.hpp" #include "barretenberg/bbapi/bbapi_ultra_honk.hpp" #include "barretenberg/common/throw_or_abort.hpp" #include @@ -21,10 +25,42 @@ using Command = NamedUnion; using CommandResponse = NamedUnion; /** diff --git a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_schnorr.cpp 
b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_schnorr.cpp new file mode 100644 index 000000000000..31bc6acc51fc --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_schnorr.cpp @@ -0,0 +1,36 @@ +/** + * @file bbapi_schnorr.cpp + * @brief Implementation of Schnorr signature command execution for the Barretenberg RPC API + */ +#include "barretenberg/bbapi/bbapi_schnorr.hpp" + +namespace bb::bbapi { + +SchnorrComputePublicKey::Response SchnorrComputePublicKey::execute(BB_UNUSED BBApiRequest& request) && +{ + return { grumpkin::g1::one * private_key }; +} + +SchnorrConstructSignature::Response SchnorrConstructSignature::execute(BB_UNUSED BBApiRequest& request) && +{ + grumpkin::g1::affine_element pub_key = grumpkin::g1::one * private_key; + crypto::schnorr_key_pair key_pair = { private_key, pub_key }; + + std::string message_str(reinterpret_cast(message.data()), message.size()); + auto sig = crypto::schnorr_construct_signature(message_str, key_pair); + + return { sig.s, sig.e }; +} + +SchnorrVerifySignature::Response SchnorrVerifySignature::execute(BB_UNUSED BBApiRequest& request) && +{ + std::string message_str(reinterpret_cast(message.data()), message.size()); + crypto::schnorr_signature sig = { s, e }; + + bool result = crypto::schnorr_verify_signature( + message_str, public_key, sig); + + return { result }; +} + +} // namespace bb::bbapi diff --git a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_schnorr.hpp b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_schnorr.hpp new file mode 100644 index 000000000000..3cc691d1c322 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_schnorr.hpp @@ -0,0 +1,85 @@ +#pragma once +/** + * @file bbapi_schnorr.hpp + * @brief Schnorr signature command definitions for the Barretenberg RPC API. + * + * This file contains command structures for Schnorr signature operations + * on the Grumpkin curve. 
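+ * Signatures are carried as the 32-byte (s, e) component pair of crypto::schnorr_signature.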
+ */ +#include "barretenberg/bbapi/bbapi_shared.hpp" +#include "barretenberg/common/named_union.hpp" +#include "barretenberg/crypto/schnorr/schnorr.hpp" +#include "barretenberg/ecc/curves/grumpkin/grumpkin.hpp" +#include "barretenberg/serialize/msgpack.hpp" +#include +#include +#include + +namespace bb::bbapi { + +/** + * @struct SchnorrComputePublicKey + * @brief Compute Schnorr public key from private key + */ +struct SchnorrComputePublicKey { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "SchnorrComputePublicKey"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "SchnorrComputePublicKeyResponse"; + grumpkin::g1::affine_element public_key; + MSGPACK_FIELDS(public_key); + bool operator==(const Response&) const = default; + }; + + grumpkin::fr private_key; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(private_key); + bool operator==(const SchnorrComputePublicKey&) const = default; +}; + +/** + * @struct SchnorrConstructSignature + * @brief Construct a Schnorr signature + */ +struct SchnorrConstructSignature { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "SchnorrConstructSignature"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "SchnorrConstructSignatureResponse"; + std::array s; + std::array e; + MSGPACK_FIELDS(s, e); + bool operator==(const Response&) const = default; + }; + + std::vector message; // Variable length + grumpkin::fr private_key; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(message, private_key); + bool operator==(const SchnorrConstructSignature&) const = default; +}; + +/** + * @struct SchnorrVerifySignature + * @brief Verify a Schnorr signature + */ +struct SchnorrVerifySignature { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "SchnorrVerifySignature"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "SchnorrVerifySignatureResponse"; + bool verified; + MSGPACK_FIELDS(verified); + bool operator==(const Response&) const = default; + }; + + std::vector message; + grumpkin::g1::affine_element public_key; + std::array s; + std::array e; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(message, public_key, s, e); + bool operator==(const SchnorrVerifySignature&) const = default; +}; + +} // namespace bb::bbapi diff --git a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_srs.cpp b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_srs.cpp new file mode 100644 index 000000000000..f43d385f31c4 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_srs.cpp @@ -0,0 +1,46 @@ +/** + * @file bbapi_srs.cpp + * @brief Implementation of SRS initialization command execution for the Barretenberg RPC API + */ +#include "barretenberg/bbapi/bbapi_srs.hpp" +#include "barretenberg/common/serialize.hpp" +#include "barretenberg/ecc/curves/bn254/g1.hpp" +#include "barretenberg/ecc/curves/bn254/g2.hpp" +#include "barretenberg/ecc/curves/grumpkin/grumpkin.hpp" +#include "barretenberg/srs/global_crs.hpp" + +namespace bb::bbapi { + +SrsInitSrs::Response SrsInitSrs::execute(BB_UNUSED BBApiRequest& request) && +{ + // Parse G1 points from buffer (64 bytes each) + std::vector g1_points(num_points); + for (size_t i = 0; i < num_points; ++i) { + g1_points[i] = from_buffer(points_buf.data(), i * 64); + } + + // Parse G2 point from buffer (128 bytes) + auto g2_point_elem = from_buffer(g2_point.data()); + + // Initialize BN254 SRS + bb::srs::init_bn254_mem_crs_factory(g1_points, g2_point_elem); + + return {}; +} + +SrsInitGrumpkinSrs::Response 
SrsInitGrumpkinSrs::execute(BB_UNUSED BBApiRequest& request) && +{ + // Parse Grumpkin affine elements from buffer + std::vector points(num_points); + for (uint32_t i = 0; i < num_points; ++i) { + points[i] = + from_buffer(points_buf.data(), i * sizeof(curve::Grumpkin::AffineElement)); + } + + // Initialize Grumpkin SRS + bb::srs::init_grumpkin_mem_crs_factory(points); + + return {}; +} + +} // namespace bb::bbapi diff --git a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_srs.hpp b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_srs.hpp new file mode 100644 index 000000000000..0d4faca8fd2b --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_srs.hpp @@ -0,0 +1,59 @@ +#pragma once +/** + * @file bbapi_srs.hpp + * @brief SRS (Structured Reference String) initialization command definitions for the Barretenberg RPC API. + * + * This file contains command structures for initializing BN254 and Grumpkin SRS. + */ +#include "barretenberg/bbapi/bbapi_shared.hpp" +#include "barretenberg/common/named_union.hpp" +#include "barretenberg/serialize/msgpack.hpp" +#include +#include + +namespace bb::bbapi { + +/** + * @struct SrsInitSrs + * @brief Initialize BN254 SRS with G1 and G2 points + */ +struct SrsInitSrs { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "SrsInitSrs"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "SrsInitSrsResponse"; + uint8_t dummy = 0; // Empty response needs a dummy field for msgpack + MSGPACK_FIELDS(dummy); + bool operator==(const Response&) const = default; + }; + + std::vector points_buf; // G1 points (64 bytes each) + uint32_t num_points; + std::vector g2_point; // G2 point (128 bytes) + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(points_buf, num_points, g2_point); + bool operator==(const SrsInitSrs&) const = default; +}; + +/** + * @struct SrsInitGrumpkinSrs + * @brief Initialize Grumpkin SRS with Grumpkin points + */ +struct SrsInitGrumpkinSrs { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "SrsInitGrumpkinSrs"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "SrsInitGrumpkinSrsResponse"; + uint8_t dummy = 0; // Empty response needs a dummy field for msgpack + MSGPACK_FIELDS(dummy); + bool operator==(const Response&) const = default; + }; + + std::vector points_buf; // Grumpkin affine elements + uint32_t num_points; + Response execute(BBApiRequest& request) &&; + MSGPACK_FIELDS(points_buf, num_points); + bool operator==(const SrsInitGrumpkinSrs&) const = default; +}; + +} // namespace bb::bbapi diff --git a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ultra_honk.cpp b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ultra_honk.cpp index a76bf68eabac..b5b7c2cb22d1 100644 --- a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ultra_honk.cpp +++ b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ultra_honk.cpp @@ -376,6 +376,18 @@ VkAsFields::Response VkAsFields::execute(BB_UNUSED const BBApiRequest& request) return { std::move(fields) }; } +MegaVkAsFields::Response MegaVkAsFields::execute(BB_UNUSED const BBApiRequest& request) && +{ + BB_BENCH_NAME(MSGPACK_SCHEMA_NAME); + std::vector fields; + + // MegaFlavor for private function verification keys + auto vk = from_buffer(verification_key); + fields = vk.to_field_elements(); + + return { std::move(fields) }; +} + CircuitWriteSolidityVerifier::Response CircuitWriteSolidityVerifier::execute(BB_UNUSED const BBApiRequest& request) && { BB_BENCH_NAME(MSGPACK_SCHEMA_NAME); diff --git 
a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ultra_honk.hpp b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ultra_honk.hpp index cf8274858154..e714e358f37c 100644 --- a/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ultra_honk.hpp +++ b/barretenberg/cpp/src/barretenberg/bbapi/bbapi_ultra_honk.hpp @@ -144,6 +144,28 @@ struct VkAsFields { bool operator==(const VkAsFields&) const = default; }; +/** + * @struct MegaVkAsFields + * @brief Convert a MegaFlavor verification key to field elements representation. + * Used for private function verification keys which use MegaFlavor (127 fields). + */ +struct MegaVkAsFields { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "MegaVkAsFields"; + + struct Response { + static constexpr const char MSGPACK_SCHEMA_NAME[] = "MegaVkAsFieldsResponse"; + + std::vector fields; + MSGPACK_FIELDS(fields); + bool operator==(const Response&) const = default; + }; + + std::vector verification_key; + MSGPACK_FIELDS(verification_key); + Response execute(const BBApiRequest& request = {}) &&; + bool operator==(const MegaVkAsFields&) const = default; +}; + /** * @brief Command to generate Solidity verifier contract */ diff --git a/barretenberg/cpp/src/barretenberg/benchmark/ipc_bench/ipc.bench.cpp b/barretenberg/cpp/src/barretenberg/benchmark/ipc_bench/ipc.bench.cpp index dddaa085d3c7..33b231ca9827 100644 --- a/barretenberg/cpp/src/barretenberg/benchmark/ipc_bench/ipc.bench.cpp +++ b/barretenberg/cpp/src/barretenberg/benchmark/ipc_bench/ipc.bench.cpp @@ -95,7 +95,8 @@ template class Poseidon2BBMsgpack : if constexpr (Transport == TransportType::Socket) { ipc_path = "/tmp/poseidon_bb_msgpack_bench.sock"; } else { - ipc_path = "/poseidon_bb_msgpack_shm_bench.shm"; + // Use short name for macOS shm_open 31-char limit + ipc_path = "/p2_bench.shm"; } } @@ -143,7 +144,7 @@ template class Poseidon2BBMsgpack : } else { // Strip .shm suffix for base name std::string base_name = ipc_path.substr(0, ipc_path.size() - 4); - clients[i] = ipc::IpcClient::create_shm(base_name, 10); + clients[i] = ipc::IpcClient::create_shm(base_name, NumClients); } bool connected = false; @@ -168,7 +169,7 @@ template class Poseidon2BBMsgpack : background_threads[i - 1] = std::thread([this, i]() { grumpkin::fq bx = grumpkin::fq::random_element(); grumpkin::fq by = grumpkin::fq::random_element(); - std::array resp_buffer{}; + std::vector resp_buffer(1024 * 1024); while (!stop_background.load(std::memory_order_relaxed)) { // Create Poseidon2Hash command @@ -244,7 +245,7 @@ template class Poseidon2BBMsgpack : // Benchmark implementation shared across all variants void run_benchmark(benchmark::State& state) { - std::array resp_buffer{}; + std::vector resp_buffer(1024 * 1024); for (auto _ : state) { // Create Poseidon2Hash command diff --git a/barretenberg/cpp/src/barretenberg/ecc/curves/secp256k1/secp256k1.hpp b/barretenberg/cpp/src/barretenberg/ecc/curves/secp256k1/secp256k1.hpp index 1ad501a8b9d4..1f4253c4d7fa 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/curves/secp256k1/secp256k1.hpp +++ b/barretenberg/cpp/src/barretenberg/ecc/curves/secp256k1/secp256k1.hpp @@ -139,6 +139,8 @@ struct FqParams { // For consistency with bb::fq, if we ever represent an element of bb::secp256k1::fq in the public inputs, we do so // as a bigfield element, so with 4 public inputs static constexpr size_t PUBLIC_INPUTS_SIZE = BIGFIELD_PUBLIC_INPUTS_SIZE; + + static constexpr char schema_name[] = "secp256k1_fq"; }; using fq = field; @@ -285,6 +287,8 @@ struct FrParams { // For consistency with bb::fq, if we ever 
represent an element of bb::secp256k1::fr in the public inputs, we do so // as a bigfield element, so with 4 public inputs static constexpr size_t PUBLIC_INPUTS_SIZE = BIGFIELD_PUBLIC_INPUTS_SIZE; + + static constexpr char schema_name[] = "secp256k1_fr"; }; using fr = field; diff --git a/barretenberg/cpp/src/barretenberg/ecc/curves/secp256r1/secp256r1.hpp b/barretenberg/cpp/src/barretenberg/ecc/curves/secp256r1/secp256r1.hpp index 836ff708dd68..2c53359c6d55 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/curves/secp256r1/secp256r1.hpp +++ b/barretenberg/cpp/src/barretenberg/ecc/curves/secp256r1/secp256r1.hpp @@ -12,6 +12,8 @@ namespace bb::secp256r1 { // NOLINTBEGIN(cppcoreguidelines-avoid-c-arrays) struct FqParams { + static constexpr const char* schema_name = "secp256r1_fq"; + // A little-endian representation of the modulus split into 4 64-bit words static constexpr uint64_t modulus_0 = 0xFFFFFFFFFFFFFFFFULL; static constexpr uint64_t modulus_1 = 0x00000000FFFFFFFFULL; @@ -140,6 +142,7 @@ struct FqParams { using fq = field; struct FrParams { + static constexpr const char* schema_name = "secp256r1_fr"; // A little-endian representation of the modulus split into 4 64-bit words static constexpr uint64_t modulus_0 = 0xF3B9CAC2FC632551ULL; diff --git a/barretenberg/cpp/src/barretenberg/ipc/shm/futex.hpp b/barretenberg/cpp/src/barretenberg/ipc/shm/futex.hpp new file mode 100644 index 000000000000..b27816299259 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/ipc/shm/futex.hpp @@ -0,0 +1,75 @@ +/** + * @file futex.hpp + * @brief Cross-platform futex-like synchronization primitives + * + * Provides unified wait/wake operations for cross-process synchronization: + * - macOS: Uses os_sync_wait_on_address / os_sync_wake_by_address_any + * - Linux: Uses futex syscalls + */ +#pragma once + +#include + +#ifdef __APPLE__ +// Darwin's os_sync API (available since macOS 10.12 / iOS 10) +// Forward declarations to avoid header dependency +extern "C" { +int os_sync_wait_on_address(void* addr, uint64_t value, size_t size, uint32_t flags); +int os_sync_wake_by_address_any(void* addr, size_t size, uint32_t flags); +} +#define OS_SYNC_WAIT_ON_ADDRESS_SHARED 1u +#define OS_SYNC_WAKE_BY_ADDRESS_SHARED 1u +#else +// Linux futex +#include +#include +#include +#endif + +namespace bb::ipc { + +/** + * @brief Atomic compare-and-wait operation + * + * Blocks if the value at addr equals expect. Works across process boundaries. + * + * @param addr Pointer to 32-bit value to wait on + * @param expect Expected value - blocks if *addr == expect + * @return 0 on wake, -1 on error + */ +inline int futex_wait(volatile uint32_t* addr, uint32_t expect) +{ +#ifdef __APPLE__ + // macOS: Use os_sync_wait_on_address with SHARED flag for cross-process + return os_sync_wait_on_address( + const_cast(addr), static_cast(expect), sizeof(uint32_t), OS_SYNC_WAIT_ON_ADDRESS_SHARED); +#else + // Linux futex + // NOLINTNEXTLINE(cppcoreguidelines-pro-type-vararg) + return static_cast(syscall(SYS_futex, addr, FUTEX_WAIT, expect, nullptr, nullptr, 0)); +#endif +} + +/** + * @brief Wake waiters blocked on an address + * + * Wakes up to n waiters blocked on addr. Works across process boundaries. 
+ * + * @param addr Pointer to 32-bit value to wake on + * @param n Number of waiters to wake (1 for single, INT_MAX for all) + * @return Number of waiters woken, or -1 on error + */ +inline int futex_wake(volatile uint32_t* addr, int n) +{ +#ifdef __APPLE__ + // macOS: Use os_sync_wake_by_address with SHARED flag for cross-process + (void)n; + return os_sync_wake_by_address_any(const_cast(addr), sizeof(uint32_t), OS_SYNC_WAKE_BY_ADDRESS_SHARED); +#else + // Linux futex + // NOLINTNEXTLINE(cppcoreguidelines-pro-type-vararg) + return static_cast(syscall(SYS_futex, addr, FUTEX_WAKE, n, nullptr, nullptr, 0)); +#endif +} + +} // namespace bb::ipc diff --git a/barretenberg/cpp/src/barretenberg/ipc/shm/mpsc_shm.cpp b/barretenberg/cpp/src/barretenberg/ipc/shm/mpsc_shm.cpp index 7fe92e594c22..d8aa52943060 100644 --- a/barretenberg/cpp/src/barretenberg/ipc/shm/mpsc_shm.cpp +++ b/barretenberg/cpp/src/barretenberg/ipc/shm/mpsc_shm.cpp @@ -1,55 +1,20 @@ -#include "barretenberg/ipc/shm/mpsc_shm.hpp" +#include "mpsc_shm.hpp" +#include "futex.hpp" +#include "utilities.hpp" #include #include #include #include #include -#include #include #include #include -#include -#include // NOLINT(modernize-deprecated-headers) - need POSIX clock_gettime/CLOCK_MONOTONIC #include #include #include -#if defined(__x86_64__) || defined(_M_X64) -#include -#define MPSC_PAUSE() _mm_pause() -#else -#define MPSC_PAUSE() \ - do { \ - } while (0) -#endif - namespace bb::ipc { -namespace { -// ----- Utilities ----- - -inline uint64_t mpsc_mono_ns_now() -{ - struct timespec ts; - if (clock_gettime(CLOCK_MONOTONIC, &ts) != 0) { - return 0; - } - return (static_cast(ts.tv_sec) * 1000000000ULL) + static_cast(ts.tv_nsec); -} - -inline int mpsc_futex_wait(volatile uint32_t* addr, uint32_t expect) -{ - // NOLINTNEXTLINE(cppcoreguidelines-pro-type-vararg) - return static_cast(syscall(SYS_futex, addr, FUTEX_WAIT, expect, nullptr, nullptr, 0)); -} - -inline int mpsc_futex_wake(volatile uint32_t* addr, int n) -{ - // NOLINTNEXTLINE(cppcoreguidelines-pro-type-vararg) - return static_cast(syscall(SYS_futex, addr, FUTEX_WAKE, n, nullptr, nullptr, 0)); -} -} // anonymous namespace - // ----- MpscConsumer Implementation ----- MpscConsumer::MpscConsumer(std::vector&& rings, int doorbell_fd, size_t doorbell_len, MpscDoorbell* doorbell) @@ -196,7 +161,7 @@ int MpscConsumer::wait_for_data(uint32_t spin_ns) // Phase 2: Spin phase if (spin_ns > 0) { - uint64_t start = mpsc_mono_ns_now(); + uint64_t start = mono_ns_now(); // NOLINTNEXTLINE(cppcoreguidelines-avoid-do-while) do { for (size_t i = 0; i < num_rings; i++) { @@ -206,8 +171,8 @@ int MpscConsumer::wait_for_data(uint32_t spin_ns) return static_cast(idx); } } - MPSC_PAUSE(); - } while ((mpsc_mono_ns_now() - start) < spin_ns); + IPC_PAUSE(); + } while ((mono_ns_now() - start) < spin_ns); } // Phase 3: Sleep on doorbell @@ -222,7 +187,7 @@ int MpscConsumer::wait_for_data(uint32_t spin_ns) } } - mpsc_futex_wait(reinterpret_cast(&doorbell_->seq), seq); + futex_wait(reinterpret_cast(&doorbell_->seq), seq); // After waking, poll again for (size_t i = 0; i < num_rings; i++) { @@ -359,7 +324,7 @@ void MpscProducer::publish(size_t n) // Ring doorbell to wake consumer // Note: We always ring the doorbell - see spsc_shm.cpp for explanation doorbell_->seq.fetch_add(1, std::memory_order_release); - mpsc_futex_wake(reinterpret_cast(&doorbell_->seq), 1); + futex_wake(reinterpret_cast(&doorbell_->seq), 1); } bool MpscProducer::wait_for_space(size_t need, uint32_t spin_ns) diff --git 
a/barretenberg/cpp/src/barretenberg/ipc/shm/mpsc_shm.hpp b/barretenberg/cpp/src/barretenberg/ipc/shm/mpsc_shm.hpp index 7d3e321a38fa..3fb8c7e59a01 100644 --- a/barretenberg/cpp/src/barretenberg/ipc/shm/mpsc_shm.hpp +++ b/barretenberg/cpp/src/barretenberg/ipc/shm/mpsc_shm.hpp @@ -7,7 +7,7 @@ #pragma once -#include "barretenberg/ipc/shm/spsc_shm.hpp" +#include "spsc_shm.hpp" #include #include #include diff --git a/barretenberg/cpp/src/barretenberg/ipc/shm/spsc_shm.cpp b/barretenberg/cpp/src/barretenberg/ipc/shm/spsc_shm.cpp index e944f5461e04..0462aac2215a 100644 --- a/barretenberg/cpp/src/barretenberg/ipc/shm/spsc_shm.cpp +++ b/barretenberg/cpp/src/barretenberg/ipc/shm/spsc_shm.cpp @@ -1,31 +1,20 @@ -#include "barretenberg/ipc/shm/spsc_shm.hpp" +#include "spsc_shm.hpp" +#include "futex.hpp" +#include "utilities.hpp" #include #include #include #include #include -#include #include #include #include #include -#include -#include // NOLINT(modernize-deprecated-headers) - need POSIX clock_gettime/CLOCK_MONOTONIC #include -#if defined(__x86_64__) || defined(_M_X64) -#include -#define SPSC_PAUSE() _mm_pause() -#else -#define SPSC_PAUSE() \ - do { \ - } while (0) -#endif - namespace bb::ipc { namespace { -// ----- Utilities ----- inline uint64_t pow2_ceil_u64(uint64_t x) { @@ -42,27 +31,6 @@ inline uint64_t pow2_ceil_u64(uint64_t x) return x + 1; } -inline uint64_t mono_ns_now() -{ - struct timespec ts; - if (clock_gettime(CLOCK_MONOTONIC, &ts) != 0) { - return 0; - } - return (static_cast(ts.tv_sec) * 1000000000ULL) + static_cast(ts.tv_nsec); -} - -// Futex helpers -inline int futex_wait(volatile uint32_t* addr, uint32_t expect) -{ - // NOLINTNEXTLINE(cppcoreguidelines-pro-type-vararg) - return static_cast(syscall(SYS_futex, addr, FUTEX_WAIT, expect, nullptr, nullptr, 0)); -} - -inline int futex_wake(volatile uint32_t* addr, int n) -{ - // NOLINTNEXTLINE(cppcoreguidelines-pro-type-vararg) - return static_cast(syscall(SYS_futex, addr, FUTEX_WAKE, n, nullptr, nullptr, 0)); -} } // anonymous namespace // ----- SpscShm Implementation ----- @@ -393,7 +361,7 @@ bool SpscShm::wait_for_data(uint32_t spin_ns) if (available() > 0) { return true; } - SPSC_PAUSE(); + IPC_PAUSE(); } while ((mono_ns_now() - start) < spin_ns); } @@ -421,7 +389,7 @@ bool SpscShm::wait_for_space(size_t need, uint32_t spin_ns) if (free_space() >= need) { return true; } - SPSC_PAUSE(); + IPC_PAUSE(); } while ((mono_ns_now() - start) < spin_ns); } diff --git a/barretenberg/cpp/src/barretenberg/ipc/shm/utilities.hpp b/barretenberg/cpp/src/barretenberg/ipc/shm/utilities.hpp new file mode 100644 index 000000000000..709deca3159c --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/ipc/shm/utilities.hpp @@ -0,0 +1,40 @@ +/** + * @file utilities.hpp + * @brief Common utilities for IPC shared memory implementation + * + * Provides timing and CPU pause utilities for spin-wait loops. + */ +#pragma once + +#include +#include // NOLINT(modernize-deprecated-headers) - need POSIX clock_gettime/CLOCK_MONOTONIC + +#if defined(__x86_64__) || defined(_M_X64) +#include +#define IPC_PAUSE() _mm_pause() +#else +#define IPC_PAUSE() \ + do { \ + } while (0) +#endif + +namespace bb::ipc { + +/** + * @brief Get current monotonic time in nanoseconds + * + * Uses CLOCK_MONOTONIC which is suitable for measuring elapsed time + * and not affected by system clock adjustments. 
+ * + * @return Current monotonic time in nanoseconds, or 0 on error + */ +inline uint64_t mono_ns_now() +{ + struct timespec ts; + if (clock_gettime(CLOCK_MONOTONIC, &ts) != 0) { + return 0; + } + return (static_cast(ts.tv_sec) * 1000000000ULL) + static_cast(ts.tv_nsec); +} + +} // namespace bb::ipc diff --git a/barretenberg/cpp/src/barretenberg/ipc/socket_server.cpp b/barretenberg/cpp/src/barretenberg/ipc/socket_server.cpp index f77007638283..1688aaf7f68d 100644 --- a/barretenberg/cpp/src/barretenberg/ipc/socket_server.cpp +++ b/barretenberg/cpp/src/barretenberg/ipc/socket_server.cpp @@ -3,13 +3,19 @@ #include #include #include -#include #include #include #include #include #include +// Platform-specific event notification includes +#ifdef __APPLE__ +#include // kqueue on macOS/BSD +#else +#include // epoll on Linux +#endif + namespace bb::ipc { SocketServer::SocketServer(std::string socket_path, int initial_max_clients) @@ -60,6 +66,28 @@ bool SocketServer::listen() return false; } +#ifdef __APPLE__ + // Create kqueue instance + kqueue_fd_ = kqueue(); + if (kqueue_fd_ < 0) { + ::close(listen_fd_); + listen_fd_ = -1; + ::unlink(socket_path_.c_str()); + return false; + } + + // Add listen socket to kqueue + struct kevent ev; + EV_SET(&ev, listen_fd_, EVFILT_READ, EV_ADD | EV_ENABLE, 0, 0, nullptr); + if (kevent(kqueue_fd_, &ev, 1, nullptr, 0, nullptr) < 0) { + ::close(kqueue_fd_); + kqueue_fd_ = -1; + ::close(listen_fd_); + listen_fd_ = -1; + ::unlink(socket_path_.c_str()); + return false; + } +#else // Create epoll instance epoll_fd_ = epoll_create1(0); if (epoll_fd_ < 0) { @@ -81,6 +109,7 @@ bool SocketServer::listen() ::unlink(socket_path_.c_str()); return false; } +#endif return true; } @@ -92,7 +121,33 @@ int SocketServer::accept(uint64_t timeout_ns) return -1; } - // Wait for connection +#ifdef __APPLE__ + // Wait for connection using kqueue + struct kevent ev; + struct timespec timeout; + struct timespec* timeout_ptr = nullptr; + + if (timeout_ns > 0) { + timeout.tv_sec = static_cast(timeout_ns / 1000000000ULL); + timeout.tv_nsec = static_cast(timeout_ns % 1000000000ULL); + timeout_ptr = &timeout; + } else if (timeout_ns == 0) { + timeout.tv_sec = 0; + timeout.tv_nsec = 0; + timeout_ptr = &timeout; + } + + int n = kevent(kqueue_fd_, nullptr, 0, &ev, 1, timeout_ptr); + if (n <= 0) { + return -1; + } + + if (static_cast(ev.ident) != listen_fd_) { + errno = EINVAL; + return -1; + } +#else + // Wait for connection using epoll struct epoll_event ev; int timeout_ms = -1; // default: infinite if (timeout_ns > 0) { @@ -109,6 +164,7 @@ int SocketServer::accept(uint64_t timeout_ns) errno = EINVAL; return -1; } +#endif // Accept connection int client_fd = ::accept(listen_fd_, nullptr, nullptr); @@ -128,6 +184,15 @@ int SocketServer::accept(uint64_t timeout_ns) fd_to_client_id_[client_fd] = client_id; num_clients_++; +#ifdef __APPLE__ + // Add client to kqueue + struct kevent kev; + EV_SET(&kev, client_fd, EVFILT_READ, EV_ADD | EV_ENABLE, 0, 0, nullptr); + if (kevent(kqueue_fd_, &kev, 1, nullptr, 0, nullptr) < 0) { + disconnect_client(client_id); + return -1; + } +#else // Add client to epoll ev.events = EPOLLIN; ev.data.fd = client_fd; @@ -135,12 +200,55 @@ int SocketServer::accept(uint64_t timeout_ns) disconnect_client(client_id); return -1; } +#endif return client_id; } int SocketServer::wait_for_data(uint64_t timeout_ns) { +#ifdef __APPLE__ + if (kqueue_fd_ < 0) { + errno = EINVAL; + return -1; + } + + struct kevent ev; + struct timespec timeout; + struct timespec* timeout_ptr = nullptr; + 
+ if (timeout_ns > 0) { + timeout.tv_sec = static_cast(timeout_ns / 1000000000ULL); + timeout.tv_nsec = static_cast(timeout_ns % 1000000000ULL); + timeout_ptr = &timeout; + } else if (timeout_ns == 0) { + timeout.tv_sec = 0; + timeout.tv_nsec = 0; + timeout_ptr = &timeout; + } + + int n = kevent(kqueue_fd_, nullptr, 0, &ev, 1, timeout_ptr); + if (n <= 0) { + return -1; + } + + int ready_fd = static_cast(ev.ident); + + // Check if it's listen socket (new connection) or client data + if (ready_fd == listen_fd_) { + errno = EAGAIN; // Signal caller to call accept + return -1; + } + + // Find which client + auto it = fd_to_client_id_.find(ready_fd); + if (it == fd_to_client_id_.end()) { + errno = ENOENT; + return -1; + } + + return it->second; +#else if (epoll_fd_ < 0) { errno = EINVAL; return -1; @@ -167,6 +275,7 @@ int SocketServer::wait_for_data(uint64_t timeout_ns) } return it->second; +#endif } ssize_t SocketServer::recv(int client_id, void* buffer, size_t max_len) @@ -253,10 +362,17 @@ void SocketServer::close_internal() fd_to_client_id_.clear(); num_clients_ = 0; +#ifdef __APPLE__ + if (kqueue_fd_ >= 0) { + ::close(kqueue_fd_); + kqueue_fd_ = -1; + } +#else if (epoll_fd_ >= 0) { ::close(epoll_fd_); epoll_fd_ = -1; } +#endif if (listen_fd_ >= 0) { ::close(listen_fd_); @@ -275,7 +391,15 @@ void SocketServer::disconnect_client(int client_id) int fd = client_fds_[static_cast(client_id)]; if (fd >= 0) { +#ifdef __APPLE__ + // For kqueue, we don't need explicit deletion - closing the fd removes it automatically + // But we can explicitly remove it for clarity + struct kevent ev; + EV_SET(&ev, fd, EVFILT_READ, EV_DELETE, 0, 0, nullptr); + kevent(kqueue_fd_, &ev, 1, nullptr, 0, nullptr); +#else epoll_ctl(epoll_fd_, EPOLL_CTL_DEL, fd, nullptr); +#endif ::close(fd); fd_to_client_id_.erase(fd); client_fds_[static_cast(client_id)] = -1; diff --git a/barretenberg/cpp/src/barretenberg/ipc/socket_server.hpp b/barretenberg/cpp/src/barretenberg/ipc/socket_server.hpp index 840cad54d328..28d725428083 100644 --- a/barretenberg/cpp/src/barretenberg/ipc/socket_server.hpp +++ b/barretenberg/cpp/src/barretenberg/ipc/socket_server.hpp @@ -13,7 +13,9 @@ namespace bb::ipc { /** * @brief IPC server implementation using Unix domain sockets * - * Direct implementation using epoll for efficient multi-client handling. + * Platform-specific implementation: + * - Linux: uses epoll for efficient multi-client handling + * - macOS: uses kqueue for efficient multi-client handling * Dynamic client capacity with no artificial limits. 
*/ class SocketServer : public IpcServer { @@ -42,7 +44,11 @@ class SocketServer : public IpcServer { std::string socket_path_; int initial_max_clients_; int listen_fd_ = -1; - int epoll_fd_ = -1; +#ifdef __APPLE__ + int kqueue_fd_ = -1; // macOS: kqueue for event notification +#else + int epoll_fd_ = -1; // Linux: epoll for event notification +#endif std::vector client_fds_; // client_id -> fd std::unordered_map fd_to_client_id_; // fd -> client_id (for fast lookup) int num_clients_ = 0; diff --git a/barretenberg/cpp/src/barretenberg/nodejs_module/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/nodejs_module/CMakeLists.txt index 0a2e316ac8a9..70b5686b964d 100644 --- a/barretenberg/cpp/src/barretenberg/nodejs_module/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/nodejs_module/CMakeLists.txt @@ -27,4 +27,4 @@ string(REGEX REPLACE "[\r\n\"]" "" NODE_API_HEADERS_DIR ${NODE_API_HEADERS_DIR}) add_library(nodejs_module SHARED ${SOURCE_FILES}) set_target_properties(nodejs_module PROPERTIES PREFIX "" SUFFIX ".node") target_include_directories(nodejs_module PRIVATE ${NODE_API_HEADERS_DIR} ${NODE_ADDON_API_DIR}) -target_link_libraries(nodejs_module PRIVATE world_state) +target_link_libraries(nodejs_module PRIVATE world_state ipc) diff --git a/barretenberg/cpp/src/barretenberg/nodejs_module/init_module.cpp b/barretenberg/cpp/src/barretenberg/nodejs_module/init_module.cpp index 8cfa6c36f8d2..a8ab0f653aef 100644 --- a/barretenberg/cpp/src/barretenberg/nodejs_module/init_module.cpp +++ b/barretenberg/cpp/src/barretenberg/nodejs_module/init_module.cpp @@ -1,4 +1,5 @@ #include "barretenberg/nodejs_module/lmdb_store/lmdb_store_wrapper.hpp" +#include "barretenberg/nodejs_module/msgpack_client/msgpack_client_wrapper.hpp" #include "barretenberg/nodejs_module/world_state/world_state.hpp" #include "napi.h" @@ -6,6 +7,8 @@ Napi::Object Init(Napi::Env env, Napi::Object exports) { exports.Set(Napi::String::New(env, "WorldState"), bb::nodejs::WorldStateWrapper::get_class(env)); exports.Set(Napi::String::New(env, "LMDBStore"), bb::nodejs::lmdb_store::LMDBStoreWrapper::get_class(env)); + exports.Set(Napi::String::New(env, "MsgpackClient"), + bb::nodejs::msgpack_client::MsgpackClientWrapper::get_class(env)); return exports; } diff --git a/barretenberg/cpp/src/barretenberg/nodejs_module/msgpack_client/msgpack_client_wrapper.cpp b/barretenberg/cpp/src/barretenberg/nodejs_module/msgpack_client/msgpack_client_wrapper.cpp new file mode 100644 index 000000000000..e39d3e05a7b5 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/nodejs_module/msgpack_client/msgpack_client_wrapper.cpp @@ -0,0 +1,100 @@ +#include "barretenberg/nodejs_module/msgpack_client/msgpack_client_wrapper.hpp" +#include "barretenberg/ipc/ipc_client.hpp" +#include "napi.h" +#include +#include + +using namespace bb::nodejs::msgpack_client; + +MsgpackClientWrapper::MsgpackClientWrapper(const Napi::CallbackInfo& info) + : ObjectWrap(info) +{ + Napi::Env env = info.Env(); + + // Arg 0: shared memory base name (string) + if (info.Length() < 1 || !info[0].IsString()) { + throw Napi::TypeError::New(env, "First argument must be a string (shared memory name)"); + } + std::string shm_name = info[0].As(); + + // Arg 1: max clients (number, default 1) + size_t max_clients = 1; + if (info.Length() > 1 && info[1].IsNumber()) { + max_clients = info[1].As().Uint32Value(); + } + + // Allocate response buffer once (16MB should be enough for most responses) + const size_t MAX_RESPONSE_SIZE = 16 * 1024 * 1024; + response_buffer_.resize(MAX_RESPONSE_SIZE); + + // 
Create shared memory client + client_ = bb::ipc::IpcClient::create_shm(shm_name, max_clients); + + // Connect to bb server + if (!client_->connect()) { + throw Napi::Error::New(env, "Failed to connect to shared memory server"); + } + + connected_ = true; +} + +MsgpackClientWrapper::~MsgpackClientWrapper() +{ + if (client_ && connected_) { + client_->close(); + } +} + +Napi::Value MsgpackClientWrapper::call(const Napi::CallbackInfo& info) +{ + Napi::Env env = info.Env(); + + if (!connected_) { + throw Napi::Error::New(env, "Client is not connected"); + } + + // Arg 0: msgpack buffer to send + if (info.Length() < 1 || !info[0].IsBuffer()) { + throw Napi::TypeError::New(env, "First argument must be a Buffer"); + } + + auto input_buffer = info[0].As>(); + const uint8_t* input_data = input_buffer.Data(); + size_t input_len = input_buffer.Length(); + + // Send request (timeout 0 = infinite) + if (!client_->send(input_data, input_len, 0)) { + throw Napi::Error::New(env, "Failed to send msgpack request"); + } + + // Receive response using pre-allocated buffer + ssize_t bytes_received = client_->recv(response_buffer_.data(), response_buffer_.size(), 0); + if (bytes_received < 0) { + throw Napi::Error::New(env, "Failed to receive msgpack response"); + } + + // Create JavaScript Buffer with the response + return Napi::Buffer::Copy(env, response_buffer_.data(), static_cast(bytes_received)); +} + +Napi::Value MsgpackClientWrapper::close(const Napi::CallbackInfo& info) +{ + Napi::Env env = info.Env(); + + if (client_ && connected_) { + client_->close(); + connected_ = false; + } + + return env.Undefined(); +} + +Napi::Function MsgpackClientWrapper::get_class(Napi::Env env) +{ + return DefineClass(env, + "MsgpackClient", + { + MsgpackClientWrapper::InstanceMethod("call", &MsgpackClientWrapper::call), + MsgpackClientWrapper::InstanceMethod("close", &MsgpackClientWrapper::close), + }); +} diff --git a/barretenberg/cpp/src/barretenberg/nodejs_module/msgpack_client/msgpack_client_wrapper.hpp b/barretenberg/cpp/src/barretenberg/nodejs_module/msgpack_client/msgpack_client_wrapper.hpp new file mode 100644 index 000000000000..49b1b2d33586 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/nodejs_module/msgpack_client/msgpack_client_wrapper.hpp @@ -0,0 +1,40 @@ +#pragma once + +#include "barretenberg/ipc/ipc_client.hpp" +#include "napi.h" +#include + +namespace bb::nodejs::msgpack_client { + +/** + * @brief NAPI wrapper for msgpack calls via shared memory IPC + * + * Provides a simple synchronous interface to send msgpack buffers + * to the bb binary via shared memory and receive responses. 
+ */ +class MsgpackClientWrapper : public Napi::ObjectWrap { + public: + MsgpackClientWrapper(const Napi::CallbackInfo& info); + ~MsgpackClientWrapper(); + + /** + * @brief Send a msgpack buffer and receive response + * @param info[0] - Buffer containing msgpack data + * @returns Buffer containing msgpack response + */ + Napi::Value call(const Napi::CallbackInfo& info); + + /** + * @brief Close the shared memory connection + */ + Napi::Value close(const Napi::CallbackInfo& info); + + static Napi::Function get_class(Napi::Env env); + + private: + std::unique_ptr client_; + std::vector response_buffer_; + bool connected_ = false; +}; + +} // namespace bb::nodejs::msgpack_client diff --git a/barretenberg/cpp/src/barretenberg/serialize/msgpack_impl.hpp b/barretenberg/cpp/src/barretenberg/serialize/msgpack_impl.hpp index c71961d8e638..b4056726de42 100644 --- a/barretenberg/cpp/src/barretenberg/serialize/msgpack_impl.hpp +++ b/barretenberg/cpp/src/barretenberg/serialize/msgpack_impl.hpp @@ -22,27 +22,40 @@ /** * Represents this as a bbmalloc'ed object, fit for sending to e.g. TypeScript. * @param obj The object. - * @return The buffer pointer/size pair. + * @param scratch_buf Optional pre-allocated scratch buffer to use if result fits. + * @param scratch_size Size of the scratch buffer. + * @return The buffer pointer/size pair. Returns scratch_buf if result fits, otherwise allocates new buffer. */ -inline std::pair msgpack_encode_buffer(auto&& obj) +inline std::pair msgpack_encode_buffer(auto&& obj, + uint8_t* scratch_buf = nullptr, + size_t scratch_size = 0) { // Create a buffer to store the encoded data msgpack::sbuffer buffer; msgpack::pack(buffer, obj); + // If scratch buffer provided and result fits, use it + if (scratch_buf != nullptr && buffer.size() <= scratch_size) { + memcpy(scratch_buf, buffer.data(), buffer.size()); + return { scratch_buf, buffer.size() }; + } + + // Otherwise allocate new buffer uint8_t* output = static_cast(aligned_alloc(64, buffer.size())); memcpy(output, buffer.data(), buffer.size()); - // Convert the buffer data to a string and return it return { output, buffer.size() }; } // This function is intended to bind a function to a MessagePack-formatted input data, // perform the function with the unpacked data, then pack the result back into MessagePack format. +// Note: output_out and output_len_out are IN-OUT parameters: +// IN: Caller provides scratch buffer pointer and size +// OUT: Returns actual result buffer (may be scratch or newly allocated) and size inline void msgpack_cbind_impl(const auto& func, // The function to be applied const uint8_t* input_in, // The input data in MessagePack format size_t input_len_in, // The length of the input data - uint8_t** output_out, // The output data in MessagePack format - size_t* output_len_out) // The length of the output data + uint8_t** output_out, // IN-OUT: scratch buffer ptr / result buffer ptr + size_t* output_len_out) // IN-OUT: scratch buffer size / result size { using FuncTraits = decltype(get_func_traits()); // Args: the parameter types of the function as a tuple. @@ -51,10 +64,17 @@ inline void msgpack_cbind_impl(const auto& func, // The function to be ap // Unpack the input data into the parameter tuple. msgpack::unpack(reinterpret_cast(input_in), input_len_in).get().convert(params); + // Read IN values: caller-provided scratch buffer + uint8_t* scratch_buf = *output_out; + size_t scratch_size = *output_len_out; + // Apply the function to the parameters, then encode the result into a MessagePack buffer. 
- auto [output, output_len] = msgpack_encode_buffer(FuncTraits::apply(func, params)); + // Try to use scratch buffer; allocate if result doesn't fit. + auto [output, output_len] = msgpack_encode_buffer(FuncTraits::apply(func, params), scratch_buf, scratch_size); - // Assign the output data and its length to the given output parameters. + // Write OUT values: actual result buffer and size + // If result fit in scratch, output == scratch_buf (pointer unchanged) + // If result didn't fit, output is newly allocated buffer (pointer changed) *output_out = output; *output_len_out = output_len; } diff --git a/barretenberg/exports.json b/barretenberg/exports.json deleted file mode 100644 index 8480fb6cc80b..000000000000 --- a/barretenberg/exports.json +++ /dev/null @@ -1,933 +0,0 @@ -[ - { - "functionName": "pedersen_commit", - "inArgs": [ - { - "name": "inputs_buffer", - "type": "fr::vec_in_buf" - }, - { - "name": "ctx_index", - "type": "const uint32_t *" - } - ], - "outArgs": [ - { - "name": "output", - "type": "affine_element::out_buf" - } - ], - "isAsync": false - }, - { - "functionName": "pedersen_hash", - "inArgs": [ - { - "name": "inputs_buffer", - "type": "fr::vec_in_buf" - }, - { - "name": "hash_index", - "type": "const uint32_t *" - } - ], - "outArgs": [ - { - "name": "output", - "type": "fr::out_buf" - } - ], - "isAsync": false - }, - { - "functionName": "pedersen_hashes", - "inArgs": [ - { - "name": "inputs_buffer", - "type": "fr::vec_in_buf" - }, - { - "name": "hash_index", - "type": "const uint32_t *" - } - ], - "outArgs": [ - { - "name": "output", - "type": "fr::out_buf" - } - ], - "isAsync": false - }, - { - "functionName": "pedersen_hash_buffer", - "inArgs": [ - { - "name": "input_buffer", - "type": "const uint8_t *" - }, - { - "name": "hash_index", - "type": "const uint32_t *" - } - ], - "outArgs": [ - { - "name": "output", - "type": "fr::out_buf" - } - ], - "isAsync": false - }, - { - "functionName": "poseidon2_hash", - "inArgs": [ - { - "name": "inputs_buffer", - "type": "fr::vec_in_buf" - } - ], - "outArgs": [ - { - "name": "output", - "type": "fr::out_buf" - } - ], - "isAsync": false - }, - { - "functionName": "poseidon2_hashes", - "inArgs": [ - { - "name": "inputs_buffer", - "type": "fr::vec_in_buf" - } - ], - "outArgs": [ - { - "name": "output", - "type": "fr::out_buf" - } - ], - "isAsync": false - }, - { - "functionName": "poseidon2_permutation", - "inArgs": [ - { - "name": "inputs_buffer", - "type": "fr::vec_in_buf" - } - ], - "outArgs": [ - { - "name": "output", - "type": "fr::vec_out_buf" - } - ], - "isAsync": false - }, - { - "functionName": "poseidon2_hash_accumulate", - "inArgs": [ - { - "name": "inputs_buffer", - "type": "fr::vec_in_buf" - } - ], - "outArgs": [ - { - "name": "output", - "type": "fr::out_buf" - } - ], - "isAsync": false - }, - { - "functionName": "blake2s", - "inArgs": [ - { - "name": "data", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "r", - "type": "out_buf32" - } - ], - "isAsync": false - }, - { - "functionName": "blake2s_to_field_", - "inArgs": [ - { - "name": "data", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "r", - "type": "fr::out_buf" - } - ], - "isAsync": false - }, - { - "functionName": "aes_encrypt_buffer_cbc", - "inArgs": [ - { - "name": "input", - "type": "const uint8_t *" - }, - { - "name": "iv", - "type": "const uint8_t *" - }, - { - "name": "key", - "type": "const uint8_t *" - }, - { - "name": "length", - "type": "const uint32_t *" - } - ], - "outArgs": [ - { - "name": "r", - "type": 
"uint8_t **" - } - ], - "isAsync": false - }, - { - "functionName": "aes_decrypt_buffer_cbc", - "inArgs": [ - { - "name": "input", - "type": "const uint8_t *" - }, - { - "name": "iv", - "type": "const uint8_t *" - }, - { - "name": "key", - "type": "const uint8_t *" - }, - { - "name": "length", - "type": "const uint32_t *" - } - ], - "outArgs": [ - { - "name": "r", - "type": "uint8_t **" - } - ], - "isAsync": false - }, - { - "functionName": "srs_init_srs", - "inArgs": [ - { - "name": "points_buf", - "type": "const uint8_t *" - }, - { - "name": "num_points", - "type": "const uint32_t *" - } - ], - "outArgs": [], - "isAsync": false - }, - { - "functionName": "srs_init_grumpkin_srs", - "inArgs": [ - { - "name": "points_buf", - "type": "const uint8_t *" - }, - { - "name": "num_points", - "type": "const uint32_t *" - } - ], - "outArgs": [], - "isAsync": false - }, - { - "functionName": "test_threads", - "inArgs": [ - { - "name": "threads", - "type": "const uint32_t *" - }, - { - "name": "iterations", - "type": "const uint32_t *" - } - ], - "outArgs": [ - { - "name": "out", - "type": "uint32_t *" - } - ], - "isAsync": false - }, - { - "functionName": "common_init_slab_allocator", - "inArgs": [ - { - "name": "circuit_size", - "type": "const uint32_t *" - } - ], - "outArgs": [], - "isAsync": false - }, - { - "functionName": "acir_get_circuit_sizes", - "inArgs": [ - { - "name": "constraint_system_buf", - "type": "const uint8_t *" - }, - { - "name": "recursive", - "type": "const bool *" - }, - { - "name": "honk_recursion", - "type": "const bool *" - } - ], - "outArgs": [ - { - "name": "total", - "type": "uint32_t *" - }, - { - "name": "subgroup", - "type": "uint32_t *" - } - ], - "isAsync": false - }, - { - "functionName": "acir_prove_and_verify_ultra_honk", - "inArgs": [ - { - "name": "constraint_system_buf", - "type": "const uint8_t *" - }, - { - "name": "witness_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "result", - "type": "bool *" - } - ], - "isAsync": false - }, - { - "functionName": "acir_prove_and_verify_mega_honk", - "inArgs": [ - { - "name": "constraint_system_buf", - "type": "const uint8_t *" - }, - { - "name": "witness_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "result", - "type": "bool *" - } - ], - "isAsync": false - }, - { - "functionName": "acir_prove_aztec_client", - "inArgs": [ - { - "name": "ivc_inputs_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "out_proof", - "type": "uint8_t **" - }, - { - "name": "out_vk", - "type": "uint8_t **" - } - ], - "isAsync": false - }, - { - "functionName": "acir_verify_aztec_client", - "inArgs": [ - { - "name": "proof_buf", - "type": "const uint8_t *" - }, - { - "name": "vk_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "result", - "type": "bool *" - } - ], - "isAsync": false - }, - { - "functionName": "acir_load_verification_key", - "inArgs": [ - { - "name": "acir_composer_ptr", - "type": "in_ptr" - }, - { - "name": "vk_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [], - "isAsync": false - }, - { - "functionName": "acir_init_verification_key", - "inArgs": [ - { - "name": "acir_composer_ptr", - "type": "in_ptr" - } - ], - "outArgs": [], - "isAsync": false - }, - { - "functionName": "acir_get_verification_key", - "inArgs": [ - { - "name": "acir_composer_ptr", - "type": "in_ptr" - } - ], - "outArgs": [ - { - "name": "out", - "type": "uint8_t **" - } - ], - "isAsync": false - }, - { - "functionName": "acir_get_prover_instance", - 
"inArgs": [ - { - "name": "acir_composer_ptr", - "type": "in_ptr" - }, - { - "name": "acir_vec", - "type": "const uint8_t *" - }, - { - "name": "recursive", - "type": "const bool *" - } - ], - "outArgs": [ - { - "name": "out", - "type": "uint8_t **" - } - ], - "isAsync": false - }, - { - "functionName": "acir_verify_proof", - "inArgs": [ - { - "name": "acir_composer_ptr", - "type": "in_ptr" - }, - { - "name": "proof_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "result", - "type": "bool *" - } - ], - "isAsync": false - }, - { - "functionName": "acir_get_solidity_verifier", - "inArgs": [ - { - "name": "acir_composer_ptr", - "type": "in_ptr" - } - ], - "outArgs": [ - { - "name": "out", - "type": "out_str_buf" - } - ], - "isAsync": false - }, - { - "functionName": "acir_honk_solidity_verifier", - "inArgs": [ - { - "name": "proof_buf", - "type": "const uint8_t *" - }, - { - "name": "vk_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "out", - "type": "out_str_buf" - } - ], - "isAsync": false - }, - { - "functionName": "acir_serialize_proof_into_fields", - "inArgs": [ - { - "name": "acir_composer_ptr", - "type": "in_ptr" - }, - { - "name": "proof_buf", - "type": "const uint8_t *" - }, - { - "name": "num_inner_public_inputs", - "type": "const uint32_t *" - } - ], - "outArgs": [ - { - "name": "out", - "type": "fr::vec_out_buf" - } - ], - "isAsync": false - }, - { - "functionName": "acir_serialize_verification_key_into_fields", - "inArgs": [ - { - "name": "acir_composer_ptr", - "type": "in_ptr" - } - ], - "outArgs": [ - { - "name": "out_vkey", - "type": "fr::vec_out_buf" - }, - { - "name": "out_key_hash", - "type": "fr::out_buf" - } - ], - "isAsync": false - }, - { - "functionName": "acir_prove_ultra_honk", - "inArgs": [ - { - "name": "acir_vec", - "type": "const uint8_t *" - }, - { - "name": "witness_vec", - "type": "const uint8_t *" - }, - { - "name": "vk_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "out", - "type": "uint8_t **" - } - ], - "isAsync": false - }, - { - "functionName": "acir_prove_ultra_keccak_honk", - "inArgs": [ - { - "name": "acir_vec", - "type": "const uint8_t *" - }, - { - "name": "witness_vec", - "type": "const uint8_t *" - }, - { - "name": "vk_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "out", - "type": "uint8_t **" - } - ], - "isAsync": false - }, - { - "functionName": "acir_prove_ultra_keccak_zk_honk", - "inArgs": [ - { - "name": "acir_vec", - "type": "const uint8_t *" - }, - { - "name": "witness_vec", - "type": "const uint8_t *" - }, - { - "name": "vk_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "out", - "type": "uint8_t **" - } - ], - "isAsync": false - }, - { - "functionName": "acir_prove_ultra_starknet_honk", - "inArgs": [ - { - "name": "acir_vec", - "type": "const uint8_t *" - }, - { - "name": "witness_vec", - "type": "const uint8_t *" - }, - { - "name": "vk_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "out", - "type": "uint8_t **" - } - ], - "isAsync": false - }, - { - "functionName": "acir_prove_ultra_starknet_zk_honk", - "inArgs": [ - { - "name": "acir_vec", - "type": "const uint8_t *" - }, - { - "name": "witness_vec", - "type": "const uint8_t *" - }, - { - "name": "vk_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "out", - "type": "uint8_t **" - } - ], - "isAsync": false - }, - { - "functionName": "acir_verify_ultra_honk", - "inArgs": [ - { - "name": "proof_buf", - "type": "const 
uint8_t *" - }, - { - "name": "vk_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "result", - "type": "bool *" - } - ], - "isAsync": false - }, - { - "functionName": "acir_verify_ultra_keccak_honk", - "inArgs": [ - { - "name": "proof_buf", - "type": "const uint8_t *" - }, - { - "name": "vk_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "result", - "type": "bool *" - } - ], - "isAsync": false - }, - { - "functionName": "acir_verify_ultra_keccak_zk_honk", - "inArgs": [ - { - "name": "proof_buf", - "type": "const uint8_t *" - }, - { - "name": "vk_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "result", - "type": "bool *" - } - ], - "isAsync": false - }, - { - "functionName": "acir_verify_ultra_starknet_honk", - "inArgs": [ - { - "name": "proof_buf", - "type": "const uint8_t *" - }, - { - "name": "vk_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "result", - "type": "bool *" - } - ], - "isAsync": false - }, - { - "functionName": "acir_verify_ultra_starknet_zk_honk", - "inArgs": [ - { - "name": "proof_buf", - "type": "const uint8_t *" - }, - { - "name": "vk_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "result", - "type": "bool *" - } - ], - "isAsync": false - }, - { - "functionName": "acir_write_vk_ultra_honk", - "inArgs": [ - { - "name": "acir_vec", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "out", - "type": "uint8_t **" - } - ], - "isAsync": false - }, - { - "functionName": "acir_write_vk_ultra_keccak_honk", - "inArgs": [ - { - "name": "acir_vec", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "out", - "type": "uint8_t **" - } - ], - "isAsync": false - }, - { - "functionName": "acir_write_vk_ultra_keccak_zk_honk", - "inArgs": [ - { - "name": "acir_vec", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "out", - "type": "uint8_t **" - } - ], - "isAsync": false - }, - { - "functionName": "acir_write_vk_ultra_starknet_honk", - "inArgs": [ - { - "name": "acir_vec", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "out", - "type": "uint8_t **" - } - ], - "isAsync": false - }, - { - "functionName": "acir_write_vk_ultra_starknet_zk_honk", - "inArgs": [ - { - "name": "acir_vec", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "out", - "type": "uint8_t **" - } - ], - "isAsync": false - }, - { - "functionName": "acir_proof_as_fields_ultra_honk", - "inArgs": [ - { - "name": "proof_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "out", - "type": "fr::vec_out_buf" - } - ], - "isAsync": false - }, - { - "functionName": "acir_vk_as_fields_ultra_honk", - "inArgs": [ - { - "name": "vk_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "out_vkey", - "type": "fr::vec_out_buf" - } - ], - "isAsync": false - }, - { - "functionName": "acir_vk_as_fields_mega_honk", - "inArgs": [ - { - "name": "vk_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "out_vkey", - "type": "fr::vec_out_buf" - } - ], - "isAsync": false - }, - { - "functionName": "acir_gates_aztec_client", - "inArgs": [ - { - "name": "ivc_inputs_buf", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "out", - "type": "uint8_t **" - } - ], - "isAsync": false - } -] diff --git a/barretenberg/scripts/bindgen.sh b/barretenberg/scripts/bindgen.sh deleted file mode 100755 index a0053b5d8efc..000000000000 --- a/barretenberg/scripts/bindgen.sh +++ /dev/null @@ -1,17 
+0,0 @@ -#!/usr/bin/env bash -# Run from aztec-packages/barretenberg. -set -eu - -if ! dpkg -l python3-clang-18 &> /dev/null; then - echo "You need to install python clang 18 e.g.: apt install python3-clang-18" - exit 1 -fi - -#find ./cpp/src -type f -name "c_bind*.hpp" > ./scripts/c_bind_files.txt -cat ./scripts/c_bind_files.txt | ./scripts/decls_json.py > exports.json -( - cd ./ts && \ - yarn install && \ - yarn node --loader ts-node/esm ./src/bindgen/index.ts ../exports.json > ./src/barretenberg_api/index.ts && \ - yarn prettier -w ./src/barretenberg_api/index.ts -) diff --git a/barretenberg/scripts/c_bind_files.txt b/barretenberg/scripts/c_bind_files.txt deleted file mode 100644 index 7795a9f92ecb..000000000000 --- a/barretenberg/scripts/c_bind_files.txt +++ /dev/null @@ -1,8 +0,0 @@ -./cpp/src/barretenberg/crypto/pedersen_commitment/c_bind.hpp -./cpp/src/barretenberg/crypto/pedersen_hash/c_bind.hpp -./cpp/src/barretenberg/crypto/poseidon2/c_bind.hpp -./cpp/src/barretenberg/crypto/blake2s/c_bind.hpp -./cpp/src/barretenberg/crypto/aes128/c_bind.hpp -./cpp/src/barretenberg/srs/c_bind.hpp -./cpp/src/barretenberg/common/c_bind.hpp -./cpp/src/barretenberg/dsl/acir_proofs/c_bind.hpp diff --git a/barretenberg/scripts/decls_json.py b/barretenberg/scripts/decls_json.py deleted file mode 100755 index b1a9c19a4e64..000000000000 --- a/barretenberg/scripts/decls_json.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python3 -import sys -import json -import clang.cindex -from typing import List - -clang.cindex.Config.set_library_file('/usr/lib/llvm-18/lib/libclang-18.so.1') - -def has_annotation(node, annotation): - for child in node.get_children(): - if child.kind == clang.cindex.CursorKind.ANNOTATE_ATTR and annotation in child.spelling: - return True - return False - -def print_diagnostic(diagnostic, file=sys.stdout): - # color codes for printing - BLUE = '\033[94m' - YELLOW = '\033[93m' - RED = '\033[91m' - ENDC = '\033[0m' - - color_map = { - clang.cindex.Diagnostic.Warning: YELLOW, - clang.cindex.Diagnostic.Error: RED, - clang.cindex.Diagnostic.Fatal: RED, - } - color = color_map.get(diagnostic.severity, BLUE) - print(color + str(diagnostic) + ENDC, file=file) - - -def process_files(files: List[str]) -> List[dict]: - result = [] - idx = clang.cindex.Index.create() - for path in files: - print(f"Processing {path}", file=sys.stderr) - tu = idx.parse(path, args=[ - "-I./cpp/src", - # HACK: this is a workaround for bindings gen finding tracy as it is referenced by the bb field header. - "-I./cpp/build/_deps/tracy-src/public", - "-I./cpp/build/_deps/msgpack-c/src/msgpack-c/include", - '-std=gnu++20', '-Wall', '-Wextra']) - for diag in tu.diagnostics: - print_diagnostic(diag, file=sys.stderr) - for node in tu.cursor.walk_preorder(): - try: - if node.kind == clang.cindex.CursorKind.FUNCTION_DECL: - # if node.spelling != "env_test_threads": - # continue - # Only interested in function declarations with WASM_EXPORT token. 
- if not has_annotation(node, 'wasm_export'): - continue - - if node.result_type.spelling != "void": - raise ValueError(f"Error: Function '{node.spelling}' must have a 'void' return type") - func = { - 'functionName': node.spelling, - 'inArgs': [ - { - 'name': arg.spelling, - 'type': arg.type.spelling, - } for arg in node.get_arguments() if arg.type.get_canonical().get_pointee().is_const_qualified() or arg.type.get_canonical().is_const_qualified() - ], - 'outArgs': [ - { - 'name': arg.spelling, - 'type': arg.type.spelling, - } for arg in node.get_arguments() if not (arg.type.get_canonical().get_pointee().is_const_qualified() or arg.type.get_canonical().is_const_qualified()) - ], - 'isAsync': has_annotation(node, 'async_wasm_export') - } - result.append(func) - except ValueError as e: - if not str(e).startswith("Unknown template argument kind"): - raise - return result - -if __name__ == '__main__': - file_list = [line.strip() for line in sys.stdin] - processed_data = process_files(file_list) - print(json.dumps(processed_data, indent=2)) diff --git a/barretenberg/ts/.gitignore b/barretenberg/ts/.gitignore index ccefa0908f25..cc254d3c8714 100644 --- a/barretenberg/ts/.gitignore +++ b/barretenberg/ts/.gitignore @@ -4,10 +4,11 @@ node_modules dest .tsbuildinfo* +*.tsbuildinfo *.log /crs package.tgz package # Generated files -src/cbind/generated/ \ No newline at end of file +src/cbind/generated/ diff --git a/barretenberg/ts/README.md b/barretenberg/ts/README.md index f6a0c1db0594..a87b970e5161 100644 --- a/barretenberg/ts/README.md +++ b/barretenberg/ts/README.md @@ -1,75 +1,21 @@ # bb.js -Prover/verifier executable and API for barretenberg. Default cli arguments are appropriate for running within Noir -project structures. +Prover/verifier library for barretenberg. It bundles support for the following: -## Performance and limitations +- x86_64 on linux. +- aarch64 on linux. +- x86_64 on macos. +- aarch64 on macos. +- Single-threaded WASM. +- Multi-threaded WASM. -Max circuit size is 2^19 gates (524,288). This is due to the underlying WASM 4GB memory limit. This should improve -with future proving systems, and/or introduction of wasm64. - -If running from node, or within browser where you can set shared memory COOP/COEP headers, multithreading is enabled. -Note there are two independent WASM builds, one with threading enabled and one without. This is because the shared -memory flag is set within the WASM itself. If you're running in a context where you can't have shared memory, we want -to fallback to single threaded performance. - -Table represents time in ms to build circuit and proof for each test on n threads. -Ignores proving key construction. 
- -``` -+--------------------------+------------+---------------+-----------+-----------+-----------+-----------+-----------+ -| Test | Gate Count | Subgroup Size | 1 | 4 | 16 | 32 | 64 | -+--------------------------+------------+---------------+-----------+-----------+-----------+-----------+-----------+ -| sha256 | 38799 | 65536 | 18764 | 5116 | 1854 | 1524 | 1635 | -| ecdsa_secp256k1 | 41049 | 65536 | 19129 | 5595 | 2255 | 2097 | 2166 | -| ecdsa_secp256r1 | 67331 | 131072 | 38815 | 11257 | 4744 | 3633 | 3702 | -| schnorr | 33740 | 65536 | 18649 | 5244 | 2019 | 1498 | 1702 | -| double_verify_proof | 505513 | 524288 | 149652 | 45702 | 20811 | 16979 | 15679 | -+--------------------------+------------+---------------+-----------+-----------+-----------+-----------+-----------+ -``` - -## Using as a standalone binary - -### Installing - -To install the package globally for running as a terminal application: - -``` -npm install -g @aztec/bb.js -``` - -Assuming `$(npm prefix -g)/bin` is in your `PATH`, you can now run the command `bb.js`. - -### Usage - -Run `bb.js` for further usage information, you'll see e.g. - -``` -% bb.js -Usage: bb.js [options] [command] - -Options: - -v, --verbose enable verbose logging (default: false) - -h, --help display help for command - -Commands: - prove_and_verify [options] Generate a proof and verify it. Process exits with success or failure code. - prove [options] Generate a proof and write it to a file. - gates [options] Print gate count to standard output. - verify [options] Verify a proof. Process exists with success or failure code. - contract [options] Output solidity verification key contract. - write_vk [options] Output verification key. - proof_as_fields [options] Return the proof as fields elements - vk_as_fields [options] Return the verification key represented as field elements. Also return the verification key hash. - help [command] display help for command -``` - -## Using as a library +If running within node.js on a support os/architecture we will use appropriate native code. +If running within node.js on an unsupported architecture we will fallback to multi-threaded WASM. +If running within the browser and served without COOP/COEP headers, we use the single-threaded WASM. +If running within the browser served with COOP/COEP headers, we use the multi-threaded WASM. ### Installing -To install as a package to be used as a library: - ``` npm install @aztec/bb.js ``` @@ -87,7 +33,7 @@ To create the API and do a blake2s hash: ```typescript import { Crs, Barretenberg, RawBuffer } from './index.js'; -const api = await Barretenberg.new(/* num_threads */ { threads: 1 }); +const api = await Barretenberg.new({ threads: 1 }); const input = Buffer.from('hello world!'); const result = await api.blake2s(input); await api.destroy(); @@ -134,18 +80,6 @@ Note that adding COOP and COEP headers will disable loading of external scripts, You can enable these headers for specific pages that perform proof generation, but this may be challenging, especially in single-page applications. One workaround is to move the proof generation to a separate page, load it in an invisible iframe within your main application, and then use `postMessage` to communicate between the pages for generating proofs. -## Development - -Create a symlink to the root script `bb.js-dev` in your path. You can now run the current state of the code from -anywhere in your filesystem with no `yarn build` required. - -If you change the C++ code run `yarn build:wasm` to rebuild the webassembly. 
- -To run the tests run `yarn test`. - -To run a continuous "stress test" run `yarn simple_test` to do 10 full pk/proof/vk iterations. This is useful for -inspecting memory growth as we continuously use the library. - ## Debugging Got an unhelpful stack trace in wasm? Run: diff --git a/barretenberg/ts/bb.js-dev b/barretenberg/ts/bb.js-dev deleted file mode 100755 index 5255fe4b3710..000000000000 --- a/barretenberg/ts/bb.js-dev +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -# Add a symlink to this somewhere in your path. -# Now you can run bb.js-dev anywhere to execute latest code, no 'yarn build' required. -SCRIPT_PATH=$(dirname $(realpath $0)) -export TS_NODE_PROJECT="$SCRIPT_PATH/tsconfig.json" -NODE_OPTIONS="--loader $SCRIPT_PATH/node_modules/ts-node/esm/transpile-only.mjs --no-warnings" node $SCRIPT_PATH/src/main.ts $@ diff --git a/barretenberg/ts/package.json b/barretenberg/ts/package.json index 2ab2792efe03..00bfc866f4a6 100644 --- a/barretenberg/ts/package.json +++ b/barretenberg/ts/package.json @@ -17,17 +17,16 @@ "files": [ "src/", "dest/", - "cjs-entry/", + "build/", "README.md" ], "scripts": { "clean": "rm -rf ./dest .tsbuildinfo .tsbuildinfo.cjs ./src/cbind/generated", "build": "yarn clean && yarn generate && yarn build:wasm && yarn build:esm && yarn build:cjs && yarn build:browser", "build:wasm": "./scripts/build_wasm.sh", - "build:esm": "tsc -b tsconfig.esm.json && chmod +x ./dest/node/main.js", + "build:esm": "tsc -b tsconfig.esm.json", "build:cjs": "tsc -b tsconfig.cjs.json && ./scripts/cjs_postprocess.sh", "build:browser": "tsc -b tsconfig.browser.json && ./scripts/browser_postprocess.sh", - "build:bindings": "cd .. && ./scripts/bindgen.sh", "generate": "NODE_OPTIONS='--loader ts-node/esm' NODE_NO_WARNINGS=1 ts-node src/cbind/generate.ts", "formatting": "prettier --check ./src && eslint --max-warnings 0 ./src", "formatting:fix": "prettier -w ./src", diff --git a/barretenberg/ts/scripts/build_wasm.sh b/barretenberg/ts/scripts/build_wasm.sh index 763e4d7ca585..858a6bae1d2a 100755 --- a/barretenberg/ts/scripts/build_wasm.sh +++ b/barretenberg/ts/scripts/build_wasm.sh @@ -6,7 +6,7 @@ set -e cd $(dirname $0)/.. if [ -z "$SKIP_CPP_BUILD" ] && [ "${CI:-0}" -eq 0 ]; then - parallel --line-buffered --tag 'denoise "../cpp/bootstrap.sh {}"' ::: build_wasm build_wasm_threads + parallel --line-buffered --tag '../cpp/bootstrap.sh {}' ::: build_wasm build_wasm_threads fi # Copy the wasm to its home in the bb.js dest folder. 
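
With exports.json, bindgen.sh and decls_json.py deleted, the TypeScript surface is no longer generated from C-bind declarations; Node callers instead speak the msgpack command set defined in bbapi, optionally through the `MsgpackClient` NAPI class added earlier in this diff. The sketch below is illustrative only: it assumes the msgpackr `Encoder`/`Decoder` already used by `backend.ts`, an assumed import path for the native addon, and an assumed `{ CommandName: payload }` envelope for bbapi commands (the envelope shape is not shown in this diff).

```typescript
import { Encoder, Decoder } from 'msgpackr';
// Hypothetical import path; the addon is built by nodejs_module above, but its JS-side
// packaging is not part of this diff.
import { MsgpackClient } from '@aztec/bb.js/native';

const encoder = new Encoder({ useRecords: false });
const decoder = new Decoder({ useRecords: false });

// Connect to a bb process serving a shared-memory endpoint (the base name is illustrative).
const client = new MsgpackClient('/bb_msgpack_example', 1);

// Placeholder 32-byte Grumpkin private key, just to exercise the round trip.
const privateKey = new Uint8Array(32).fill(1);

// Field names mirror MSGPACK_FIELDS(private_key) on SchnorrComputePublicKey; the
// { CommandName: payload } wrapping is an assumed convention, not taken from this diff.
const requestBuf = encoder.encode({ SchnorrComputePublicKey: { private_key: privateKey } });
const responseBuf = client.call(requestBuf); // synchronous send + receive over shared memory
const response = decoder.decode(responseBuf);

client.close();
```

The call is deliberately synchronous and buffer-in/buffer-out: the wrapper does no msgpack interpretation itself, so the TypeScript side stays in control of command encoding and can reuse the same codecs as the WASM path.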
diff --git a/barretenberg/ts/scripts/kill_wrapper.sh b/barretenberg/ts/scripts/kill_wrapper.sh new file mode 100755 index 000000000000..c687a3d99578 --- /dev/null +++ b/barretenberg/ts/scripts/kill_wrapper.sh @@ -0,0 +1,51 @@ +#!/usr/bin/env bash + +# Function to get the PPID in macOS +get_ppid_macos() { + ps -j $$ | awk 'NR==2 {print $3}' +} + +# Function to get the PPID in Linux +get_ppid_linux() { + awk '{print $4}' /proc/$$/stat +} + +# Function to check if a process is alive in macOS +is_process_alive_macos() { + ps -p $1 > /dev/null 2>&1 +} + +# Function to check if a process is alive in Linux +is_process_alive_linux() { + [ -d /proc/$1 ] +} + + +# Determine the operating system and call the appropriate function +if [[ "$OSTYPE" == "darwin"* ]]; then + PARENT_PID=$(get_ppid_macos) + check_process_alive() { is_process_alive_macos $1; } +elif [[ "$OSTYPE" == "linux-gnu"* ]]; then + PARENT_PID=$(get_ppid_linux) + check_process_alive() { is_process_alive_linux $1; } +else + echo "Unsupported OS" + exit 1 +fi + +# echo "Parent PID: $PARENT_PID" + +# Start command in the background. +$@ & +CHILD_PID=$! + +cleanup() { + kill $CHILD_PID +} + +trap cleanup EXIT + +# Continuously check if the parent process is still alive. +while check_process_alive $PARENT_PID; do + sleep 1 +done diff --git a/barretenberg/ts/src/barretenberg/__snapshots__/pedersen.test.ts.snap b/barretenberg/ts/src/barretenberg/__snapshots__/pedersen.test.ts.snap index f8bbf1364712..5526a5ff1813 100644 --- a/barretenberg/ts/src/barretenberg/__snapshots__/pedersen.test.ts.snap +++ b/barretenberg/ts/src/barretenberg/__snapshots__/pedersen.test.ts.snap @@ -1,7 +1,7 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing exports[`pedersen sync pedersenCommit 1`] = ` -Point { +{ "x": Fr { "value": Uint8Array [ 40, diff --git a/barretenberg/ts/src/barretenberg/__snapshots__/poseidon.test.ts.snap b/barretenberg/ts/src/barretenberg/__snapshots__/poseidon.test.ts.snap index 3af7544b2b80..09f3ec355fa7 100644 --- a/barretenberg/ts/src/barretenberg/__snapshots__/poseidon.test.ts.snap +++ b/barretenberg/ts/src/barretenberg/__snapshots__/poseidon.test.ts.snap @@ -1,8 +1,8 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing exports[`poseidon sync poseidonHash 1`] = ` -Fr { - "value": Uint8Array [ +{ + "hash": Uint8Array [ 43, 202, 235, diff --git a/barretenberg/ts/src/barretenberg/backend.ts b/barretenberg/ts/src/barretenberg/backend.ts index d182fa374ba2..de36898990fe 100644 --- a/barretenberg/ts/src/barretenberg/backend.ts +++ b/barretenberg/ts/src/barretenberg/backend.ts @@ -1,14 +1,6 @@ import { BackendOptions, Barretenberg, CircuitOptions } from './index.js'; -import { RawBuffer } from '../types/raw_buffer.js'; -import { - ProofData, - reconstructHonkProof, - splitHonkProof, - PAIRING_POINTS_SIZE, - uint8ArrayToHex, - hexToUint8Array, -} from '../proof/index.js'; -import { ClientIVCProof, fromClientIVCProof, toClientIVCProof } from '../cbind/generated/api_types.js'; +import { ProofData, uint8ArrayToHex, hexToUint8Array } from '../proof/index.js'; +import { fromClientIVCProof, toClientIVCProof } from '../cbind/generated/api_types.js'; import { ungzip } from 'pako'; import { Buffer } from 'buffer'; import { Decoder, Encoder } from 'msgpackr'; @@ -45,12 +37,20 @@ export type UltraHonkBackendOptions = { starknetZK?: boolean; }; -function getProofSettingsFromOptions( - options?: UltraHonkBackendOptions, -): { ipaAccumulation: 
boolean; oracleHashType: string; disableZk: boolean, optimizedSolidityVerifier: boolean } { +function getProofSettingsFromOptions(options?: UltraHonkBackendOptions): { + ipaAccumulation: boolean; + oracleHashType: string; + disableZk: boolean; + optimizedSolidityVerifier: boolean; +} { return { ipaAccumulation: false, - oracleHashType: options?.keccak || options?.keccakZK ? 'keccak' : (options?.starknet || options?.starknetZK ? 'starknet' : 'poseidon2'), + oracleHashType: + options?.keccak || options?.keccakZK + ? 'keccak' + : options?.starknet || options?.starknetZK + ? 'starknet' + : 'poseidon2', // TODO no current way to target non-zk poseidon2 hash disableZk: options?.keccak || options?.starknet ? true : false, optimizedSolidityVerifier: false, @@ -75,7 +75,10 @@ export class UltraHonkVerifierBackend { } } - async verifyProof(proofData: ProofData & { verificationKey: Uint8Array }, options?: UltraHonkBackendOptions): Promise { + async verifyProof( + proofData: ProofData & { verificationKey: Uint8Array }, + options?: UltraHonkBackendOptions, + ): Promise { await this.instantiate(); const proofFrs: Uint8Array[] = []; @@ -136,7 +139,7 @@ export class UltraHonkBackend { bytecode: Buffer.from(this.acirUncompressedBytecode), verificationKey: Buffer.from([]), // Empty VK - lower performance. }, - settings: getProofSettingsFromOptions(options) + settings: getProofSettingsFromOptions(options), }); console.log(`Generated proof for circuit with ${publicInputs.length} public inputs and ${proof.length} fields.`); @@ -164,7 +167,7 @@ export class UltraHonkBackend { }, settings: getProofSettingsFromOptions(options), }); - const {verified} = await this.api.circuitVerify({ + const { verified } = await this.api.circuitVerify({ verificationKey: vkResult.bytes, publicInputs: proofData.publicInputs.map(hexToUint8Array), proof: proofFrs, @@ -187,10 +190,13 @@ export class UltraHonkBackend { } /** @description Returns a solidity verifier */ - async getSolidityVerifier(vk?: Uint8Array): Promise { + async getSolidityVerifier(vk: Uint8Array, options?: UltraHonkBackendOptions): Promise { await this.instantiate(); - const vkBuf = vk ?? (await this.api.acirWriteVkUltraKeccakHonk(this.acirUncompressedBytecode)); - return await this.api.acirHonkSolidityVerifier(this.acirUncompressedBytecode, new RawBuffer(vkBuf)); + const result = await this.api.circuitWriteSolidityVerifier({ + verificationKey: vk, + settings: getProofSettingsFromOptions(options), + }); + return result.solidityCode; } // TODO(https://github.com/noir-lang/noir/issues/5661): Update this to handle Honk recursive aggregation in the browser once it is ready in the backend itself @@ -233,7 +239,7 @@ export class UltraHonkBackend { // We use an empty string for the vk hash here as it is unneeded as part of the recursive artifacts // The user can be expected to hash the vk inside their circuit to check whether the vk is the circuit // they expect - vkHash: uint8ArrayToHex(vkResult.hash) + vkHash: uint8ArrayToHex(vkResult.hash), }; } @@ -293,27 +299,26 @@ export class AztecClientBackend { name: functionName, bytecode: Buffer.from(bytecode), verificationKey: Buffer.from(vk), - } + }, }); // Accumulate with witness this.api.clientIvcAccumulate({ witness: Buffer.from(witness), }); - } - - // Generate the proof (and wait for all previous steps to finish) const proveResult = await this.api.clientIvcProve({}); // The API currently expects a msgpack-encoded API. 
- const proof = new Encoder({useRecords: false}).encode(fromClientIVCProof(proveResult.proof)); + const proof = new Encoder({ useRecords: false }).encode(fromClientIVCProof(proveResult.proof)); // Generate the VK - const vkResult = await this.api.clientIvcComputeIvcVk({ circuit: { - name: 'hiding', - bytecode: this.acirBuf[this.acirBuf.length - 1], - } }); + const vkResult = await this.api.clientIvcComputeIvcVk({ + circuit: { + name: 'hiding', + bytecode: this.acirBuf[this.acirBuf.length - 1], + }, + }); const proofFields = [ proveResult.proof.megaProof, @@ -333,7 +338,7 @@ export class AztecClientBackend { async verify(proof: Uint8Array, vk: Uint8Array): Promise { await this.instantiate(); const result = await this.api.clientIvcVerify({ - proof: toClientIVCProof(new Decoder({useRecords: false}).decode(proof)), + proof: toClientIVCProof(new Decoder({ useRecords: false }).decode(proof)), vk: Buffer.from(vk), }); return result.valid; @@ -348,7 +353,7 @@ export class AztecClientBackend { name: 'circuit', bytecode: buf, }, - includeGatesPerOpcode: false + includeGatesPerOpcode: false, }); circuitSizes.push(gates.circuitSize); } diff --git a/barretenberg/ts/src/barretenberg/blake2s.test.ts b/barretenberg/ts/src/barretenberg/blake2s.test.ts index 2e5a07883c4a..4ce04de9ac43 100644 --- a/barretenberg/ts/src/barretenberg/blake2s.test.ts +++ b/barretenberg/ts/src/barretenberg/blake2s.test.ts @@ -1,11 +1,12 @@ -import { Barretenberg, BarretenbergSync } from './index.js'; -import { Buffer32, Fr } from '../types/index.js'; +import { BackendType, Barretenberg, BarretenbergSync } from './index.js'; +import { Fr } from '../types/index.js'; describe('blake2s async', () => { let api: Barretenberg; beforeAll(async () => { - api = await Barretenberg.new({ threads: 1 }); + // We're going to test over a worker backend to cover more code paths. 
+ api = await Barretenberg.new({ threads: 1, backend: BackendType.WasmWorker }); }); afterAll(async () => { @@ -14,14 +15,12 @@ describe('blake2s async', () => { it('blake2s', async () => { const input = Buffer.from('abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789'); - const expected = Buffer32.fromBuffer( - new Uint8Array([ - 0x44, 0xdd, 0xdb, 0x39, 0xbd, 0xb2, 0xaf, 0x80, 0xc1, 0x47, 0x89, 0x4c, 0x1d, 0x75, 0x6a, 0xda, 0x3d, 0x1c, - 0x2a, 0xc2, 0xb1, 0x00, 0x54, 0x1e, 0x04, 0xfe, 0x87, 0xb4, 0xa5, 0x9e, 0x12, 0x43, - ]), - ); - const result = await api.blake2s(input); - expect(result).toEqual(expected); + const expected = new Uint8Array([ + 0x44, 0xdd, 0xdb, 0x39, 0xbd, 0xb2, 0xaf, 0x80, 0xc1, 0x47, 0x89, 0x4c, 0x1d, 0x75, 0x6a, 0xda, 0x3d, 0x1c, 0x2a, + 0xc2, 0xb1, 0x00, 0x54, 0x1e, 0x04, 0xfe, 0x87, 0xb4, 0xa5, 0x9e, 0x12, 0x43, + ]); + const response = await api.blake2s({ data: input }); + expect(response.hash).toEqual(expected); }); it('blake2sToField', async () => { @@ -32,7 +31,8 @@ describe('blake2s async', () => { 0x2a, 0xc2, 0xb1, 0x00, 0x54, 0x1e, 0x04, 0xfe, 0x87, 0xb4, 0xa5, 0x9e, 0x12, 0x43, ]), ); - const result = await api.blake2sToField(input); + const response = await api.blake2sToField({ data: input }); + const result = Fr.fromBuffer(response.field); expect(result).toEqual(expected); }); }); @@ -46,14 +46,12 @@ describe('blake2s sync', () => { it('blake2s', () => { const input = Buffer.from('abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789'); - const expected = Buffer32.fromBuffer( - new Uint8Array([ - 0x44, 0xdd, 0xdb, 0x39, 0xbd, 0xb2, 0xaf, 0x80, 0xc1, 0x47, 0x89, 0x4c, 0x1d, 0x75, 0x6a, 0xda, 0x3d, 0x1c, - 0x2a, 0xc2, 0xb1, 0x00, 0x54, 0x1e, 0x04, 0xfe, 0x87, 0xb4, 0xa5, 0x9e, 0x12, 0x43, - ]), - ); - const result = api.blake2s(input); - expect(result).toEqual(expected); + const expected = new Uint8Array([ + 0x44, 0xdd, 0xdb, 0x39, 0xbd, 0xb2, 0xaf, 0x80, 0xc1, 0x47, 0x89, 0x4c, 0x1d, 0x75, 0x6a, 0xda, 0x3d, 0x1c, 0x2a, + 0xc2, 0xb1, 0x00, 0x54, 0x1e, 0x04, 0xfe, 0x87, 0xb4, 0xa5, 0x9e, 0x12, 0x43, + ]); + const response = api.blake2s({ data: input }); + expect(response.hash).toEqual(expected); }); it('blake2sToField', () => { @@ -64,7 +62,8 @@ describe('blake2s sync', () => { 0x2a, 0xc2, 0xb1, 0x00, 0x54, 0x1e, 0x04, 0xfe, 0x87, 0xb4, 0xa5, 0x9e, 0x12, 0x43, ]), ); - const result = api.blake2sToField(input); + const response = api.blake2sToField({ data: input }); + const result = Fr.fromBuffer(response.field); expect(result).toEqual(expected); }); }); diff --git a/barretenberg/ts/src/barretenberg/common.test.ts b/barretenberg/ts/src/barretenberg/common.test.ts deleted file mode 100644 index 96f745566687..000000000000 --- a/barretenberg/ts/src/barretenberg/common.test.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { Barretenberg } from './index.js'; - -describe('env', () => { - let api: Barretenberg; - - beforeAll(async () => { - api = await Barretenberg.new({ threads: 3 }); - }, 30000); - - afterAll(async () => { - if (api) { - await api.destroy(); - } - }); - - it('thread test', async () => { - // Main thread doesn't do anything in this test, so -1. 
- const threads = (await api.getNumThreads()) - 1; - const iterations = 100000; - const result = await api.testThreads(threads, iterations); - expect(result).toBe(iterations); - }); -}); diff --git a/barretenberg/ts/src/barretenberg/index.ts b/barretenberg/ts/src/barretenberg/index.ts index 6454512559c2..70dd0c4c96ff 100644 --- a/barretenberg/ts/src/barretenberg/index.ts +++ b/barretenberg/ts/src/barretenberg/index.ts @@ -1,33 +1,13 @@ -import { proxy } from 'comlink'; -import { BarretenbergApi, BarretenbergApiSync } from '../barretenberg_api/index.js'; -import { createMainWorker } from '../barretenberg_wasm/barretenberg_wasm_main/factory/node/index.js'; -import { BarretenbergWasmMain, BarretenbergWasmMainWorker } from '../barretenberg_wasm/barretenberg_wasm_main/index.js'; -import { getRemoteBarretenbergWasm } from '../barretenberg_wasm/helpers/index.js'; import { Crs, GrumpkinCrs } from '../crs/index.js'; -import { RawBuffer } from '../types/raw_buffer.js'; -import { fetchModuleAndThreads } from '../barretenberg_wasm/index.js'; import { createDebugLogger } from '../log/index.js'; import { AsyncApi } from '../cbind/generated/async.js'; -import { BbApiBase, CircuitComputeVk, CircuitProve, CircuitVerify, ClientIvcAccumulate, ClientIvcComputeIvcVk, ClientIvcStats, ClientIvcLoad, ClientIvcProve, ClientIvcStart, ClientIvcVerify, VkAsFields } from '../cbind/generated/api_types.js'; +import { SyncApi } from '../cbind/generated/sync.js'; +import { IMsgpackBackendSync, IMsgpackBackendAsync } from '../bb_backends/interface.js'; +import { BackendOptions, BackendType } from '../bb_backends/index.js'; +import { createAsyncBackend, createSyncBackend } from '../bb_backends/node/index.js'; export { UltraHonkBackend, UltraHonkVerifierBackend, AztecClientBackend } from './backend.js'; - -export type BackendOptions = { - /** @description Number of threads to run the backend worker on */ - threads?: number; - - /** @description Initial and Maximum memory to be alloted to the backend worker */ - memory?: { initial?: number; maximum?: number }; - - /** @description Path to download CRS files */ - crsPath?: string; - - /** @description Path to download WASM files */ - wasmPath?: string; - - /** @description Logging function */ - logger?: (msg: string) => void; -}; +export * from '../bb_backends/index.js'; export type CircuitOptions = { /** @description Whether to produce SNARK friendly proofs */ @@ -38,42 +18,46 @@ export type CircuitOptions = { * The main class library consumers interact with. * It extends the generated api, and provides a static constructor "new" to compose components. */ -export class Barretenberg extends BarretenbergApi { +export class Barretenberg extends AsyncApi { private options: BackendOptions; - private bbApi: BbApiBase; - - private constructor( - private worker: any, - wasm: BarretenbergWasmMainWorker, - options: BackendOptions, - ) { - super(wasm); + + constructor(backend: IMsgpackBackendAsync, options: BackendOptions) { + super(backend); this.options = options; - this.bbApi = new AsyncApi(wasm); } /** * Constructs an instance of Barretenberg. - * Launches it within a worker. This is necessary as it blocks waiting on child threads to complete, - * and blocking the main thread in the browser is not allowed. - * It threads > 1 (defaults to hardware availability), child threads will be created on their own workers. + * + * If options.backend is set: uses that specific backend (throws if unavailable) + * If options.backend is unset: tries backends in order with fallback: + * 1. 
NativeSharedMemory (if bb binary available) + * 2. WasmWorker (in browser) or Wasm (in Node.js) */ static async new(options: BackendOptions = {}) { - const worker = await createMainWorker(); - const wasm = getRemoteBarretenbergWasm(worker); - const { module, threads } = await fetchModuleAndThreads(options.threads, options.wasmPath, options.logger); - await wasm.init( - module, - threads, - proxy(options.logger ?? createDebugLogger('bb_wasm_async')), - options.memory?.initial, - options.memory?.maximum, - ); - return new Barretenberg(worker, wasm, options); - } + const logger = options.logger ?? createDebugLogger('bb_async'); + + if (options.backend) { + // Explicit backend required - no fallback + return await createAsyncBackend(options.backend, options, logger); + } - async getNumThreads() { - return await this.wasm.getNumThreads(); + if (typeof window === 'undefined') { + try { + return await createAsyncBackend(BackendType.NativeSharedMemory, options, logger); + } catch (err: any) { + logger(`Shared memory unavailable (${err.message}), falling back to other backends`); + try { + return await createAsyncBackend(BackendType.NativeUnixSocket, options, logger); + } catch (err: any) { + logger(`Unix socket unavailable (${err.message}), falling back to WASM`); + return await createAsyncBackend(BackendType.Wasm, options, logger); + } + } + } else { + logger(`In browser, using WASM over worker backend.`); + return await createAsyncBackend(BackendType.WasmWorker, options, logger); + } } async initSRSForCircuitSize(circuitSize: number): Promise { @@ -81,7 +65,7 @@ export class Barretenberg extends BarretenbergApi { const crs = await Crs.new(Math.max(circuitSize, minSRSSize) + 1, this.options.crsPath, this.options.logger); // TODO(https://github.com/AztecProtocol/barretenberg/issues/1129): Do slab allocator initialization? // await this.commonInitSlabAllocator(circuitSize); - await this.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); + await this.srsInitSrs({ pointsBuf: crs.getG1Data(), numPoints: crs.numPoints, g2Point: crs.getG2Data() }); } async initSRSClientIVC(srsSize = this.getDefaultSrsSize()): Promise { @@ -91,8 +75,8 @@ export class Barretenberg extends BarretenbergApi { // Load CRS into wasm global CRS state. // TODO: Make RawBuffer be default behavior, and have a specific Vector type for when wanting length prefixed. - await this.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); - await this.srsInitGrumpkinSrs(new RawBuffer(grumpkinCrs.getG1Data()), grumpkinCrs.numPoints); + await this.srsInitSrs({ pointsBuf: crs.getG1Data(), numPoints: crs.numPoints, g2Point: crs.getG2Data() }); + await this.srsInitGrumpkinSrs({ pointsBuf: grumpkinCrs.getG1Data(), numPoints: grumpkinCrs.numPoints }); } getDefaultSrsSize(): number { @@ -104,101 +88,136 @@ export class Barretenberg extends BarretenbergApi { return 2 ** 20; } + async acirGetCircuitSizes( + bytecode: Uint8Array, + recursive: boolean, + honkRecursion: boolean, + ): Promise<[number, number]> { + const response = await this.circuitStats({ + circuit: { name: '', bytecode, verificationKey: new Uint8Array() }, + includeGatesPerOpcode: false, + settings: { + ipaAccumulation: false, + oracleHashType: honkRecursion ? 
'poseidon2' : 'keccak', + disableZk: !recursive, + optimizedSolidityVerifier: false, + }, + }); + return [response.numGates, response.numGatesDyadic]; + } + async acirInitSRS(bytecode: Uint8Array, recursive: boolean, honkRecursion: boolean): Promise { - // eslint-disable-next-line @typescript-eslint/no-unused-vars - const [_total, subgroupSize] = await this.acirGetCircuitSizes(bytecode, recursive, honkRecursion); + const [_, subgroupSize] = await this.acirGetCircuitSizes(bytecode, recursive, honkRecursion); return this.initSRSForCircuitSize(subgroupSize); } async destroy() { - await this.wasm.destroy(); - await this.worker.terminate(); - } - - getWasm() { - return this.wasm; - } - - // Wrap ClientIVC methods used by AztecClientBackend and UltraHonkBackend - async clientIvcStart(command: ClientIvcStart) { - return this.bbApi.clientIvcStart(command); - } - - async clientIvcLoad(command: ClientIvcLoad) { - return this.bbApi.clientIvcLoad(command); - } - - async clientIvcAccumulate(command: ClientIvcAccumulate) { - return this.bbApi.clientIvcAccumulate(command); + return super.destroy(); } - async clientIvcProve(command: ClientIvcProve) { - return this.bbApi.clientIvcProve(command); - } - - async clientIvcVerify(command: ClientIvcVerify) { - return this.bbApi.clientIvcVerify(command); - } - - async clientIvcComputeIvcVk(command: ClientIvcComputeIvcVk) { - return this.bbApi.clientIvcComputeIvcVk(command); + /** + * Initialize the singleton instance of Barretenberg. + * @param options Backend configuration options + */ + static async initSingleton(options: BackendOptions = {}) { + if (!barretenbergSingletonPromise) { + barretenbergSingletonPromise = Barretenberg.new(options); + } + try { + barretenbergSingleton = await barretenbergSingletonPromise; + return barretenbergSingleton; + } catch (error) { + // If initialization fails, clear the singleton so next call can retry + barretenbergSingleton = undefined!; + barretenbergSingletonPromise = undefined!; + throw error; + } } - async clientIvcStats(command: ClientIvcStats) { - return this.bbApi.clientIvcStats(command); + static async destroySingleton() { + if (barretenbergSingleton) { + await barretenbergSingleton.destroy(); + barretenbergSingleton = undefined!; + barretenbergSingletonPromise = undefined!; + } } - // Wrap circuit methods used by BbApiUltraHonkBackend - async circuitProve(command: CircuitProve) { - return this.bbApi.circuitProve(command); + /** + * Get the singleton instance of Barretenberg. + * Must call initSingleton() first. + */ + static getSingleton() { + if (!barretenbergSingleton) { + throw new Error('First call Barretenberg.initSingleton() on @aztec/bb.js module.'); + } + return barretenbergSingleton; } +} - async circuitComputeVk(command: CircuitComputeVk) { - return this.bbApi.circuitComputeVk(command); - } +let barretenbergSingletonPromise: Promise; +let barretenbergSingleton: Barretenberg; - async circuitVerify(command: CircuitVerify) { - return this.bbApi.circuitVerify(command); - } +let barretenbergSyncSingletonPromise: Promise; +let barretenbergSyncSingleton: BarretenbergSync; - async vkAsFields(command: VkAsFields) { - return this.bbApi.vkAsFields(command); +export class BarretenbergSync extends SyncApi { + constructor(backend: IMsgpackBackendSync) { + super(backend); } -} + /** + * Create a new BarretenbergSync instance. + * + * If options.backend is set: uses that specific backend (throws if unavailable) + * If options.backend is unset: tries backends in order with fallback: + * 1. 
NativeSharedMem (if bb binary + NAPI module available) + * 2. Wasm + * + * Supported backends: Wasm, NativeSharedMem + * Not supported: WasmWorker (no workers in sync), NativeUnixSocket (async only) + */ + static async new(options: BackendOptions = {}) { + const logger = options.logger ?? createDebugLogger('bb_sync'); -let barretenbergSyncSingletonPromise: Promise; -let barretenbergSyncSingleton: BarretenbergSync; + if (options.backend) { + return await createSyncBackend(options.backend, options, logger); + } -export class BarretenbergSync extends BarretenbergApiSync { - private constructor(wasm: BarretenbergWasmMain) { - super(wasm); - } + // Try native, fallback to WASM. + try { + return await createSyncBackend(BackendType.NativeSharedMemory, options, logger); + } catch (err: any) { + logger(`Shared memory unavailable (${err.message}), falling back to WASM`); + } - private static async new(wasmPath?: string, logger: (msg: string) => void = createDebugLogger('bb_wasm_sync')) { - const wasm = new BarretenbergWasmMain(); - const { module, threads } = await fetchModuleAndThreads(1, wasmPath, logger); - await wasm.init(module, threads, logger); - return new BarretenbergSync(wasm); + return await createSyncBackend(BackendType.Wasm, options, logger); } - static async initSingleton(wasmPath?: string, logger: (msg: string) => void = createDebugLogger('bb_wasm_sync')) { + /** + * Initialize the singleton instance. + * @param options Backend configuration options + */ + static async initSingleton(options: BackendOptions = {}) { if (!barretenbergSyncSingletonPromise) { - barretenbergSyncSingletonPromise = BarretenbergSync.new(wasmPath, logger); + barretenbergSyncSingletonPromise = BarretenbergSync.new(options); } barretenbergSyncSingleton = await barretenbergSyncSingletonPromise; return barretenbergSyncSingleton; } + static destroySingleton() { + if (barretenbergSyncSingleton) { + barretenbergSyncSingleton.destroy(); + barretenbergSyncSingleton = undefined!; + barretenbergSyncSingletonPromise = undefined!; + } + } + static getSingleton() { if (!barretenbergSyncSingleton) { throw new Error('First call BarretenbergSync.initSingleton() on @aztec/bb.js module.'); } return barretenbergSyncSingleton; } - - getWasm() { - return this.wasm; - } } diff --git a/barretenberg/ts/src/barretenberg/pedersen.test.ts b/barretenberg/ts/src/barretenberg/pedersen.test.ts index 7c9c81091ac2..bf394401261a 100644 --- a/barretenberg/ts/src/barretenberg/pedersen.test.ts +++ b/barretenberg/ts/src/barretenberg/pedersen.test.ts @@ -10,7 +10,8 @@ describe('pedersen sync', () => { }); it('pedersenHash', () => { - const result = api.pedersenHash([new Fr(4n), new Fr(8n)], 7); + const response = api.pedersenHash({ inputs: [new Fr(4n).toBuffer(), new Fr(8n).toBuffer()], hashIndex: 7 }); + const result = Fr.fromBuffer(response.hash); expect(result).toMatchSnapshot(); }); @@ -19,34 +20,40 @@ describe('pedersen sync', () => { const fields = Array.from({ length: loops * 2 }).map(() => Fr.random()); const t = new Timer(); for (let i = 0; i < loops; ++i) { - api.pedersenHash([fields[i * 2], fields[i * 2 + 1]], 0); + api.pedersenHash({ inputs: [fields[i * 2].toBuffer(), fields[i * 2 + 1].toBuffer()], hashIndex: 0 }); } const us = t.us() / loops; console.log(`Executed ${loops} hashes at an average ${us}us / hash`); }); - it('pedersenHashes perf test', () => { - const loops = 10; - const numHashesPerLoop = 1024; - const fields = Array.from({ length: numHashesPerLoop * 2 }).map(() => Fr.random()); - const t = new Timer(); - for (let i = 0; i < 
loops; ++i) { - api.pedersenHashes(fields, 0); - } - const us = t.us() / (numHashesPerLoop * loops); - console.log(`Executed ${numHashesPerLoop * loops} hashes at an average ${us}us / hash`); - }); + // TODO: pedersenHashes not yet in new msgpack API + // it.skip('pedersenHashes perf test', () => { + // const loops = 10; + // const numHashesPerLoop = 1024; + // const fields = Array.from({ length: numHashesPerLoop * 2 }).map(() => Fr.random()); + // const t = new Timer(); + // for (let i = 0; i < loops; ++i) { + // // api.pedersenHashes(fields, 0); // Not in new API yet + // } + // const us = t.us() / (numHashesPerLoop * loops); + // console.log(`Executed ${numHashesPerLoop * loops} hashes at an average ${us}us / hash`); + // }); it('pedersenHashBuffer', () => { const input = Buffer.alloc(123); input.writeUint32BE(321, 0); input.writeUint32BE(456, 119); - const r = api.pedersenHashBuffer(input, 0); + const response = api.pedersenHashBuffer({ input, hashIndex: 0 }); + const r = Fr.fromBuffer(response.hash); expect(r).toMatchSnapshot(); }); it('pedersenCommit', () => { - const result = api.pedersenCommit([new Fr(4n), new Fr(8n), new Fr(12n)], 0); + const response = api.pedersenCommit({ + inputs: [new Fr(4n).toBuffer(), new Fr(8n).toBuffer(), new Fr(12n).toBuffer()], + hashIndex: 0, + }); + const result = { x: Fr.fromBuffer(response.point.x), y: Fr.fromBuffer(response.point.y) }; expect(result).toMatchSnapshot(); }); @@ -55,7 +62,7 @@ describe('pedersen sync', () => { const fields = Array.from({ length: loops * 2 }).map(() => Fr.random()); const t = new Timer(); for (let i = 0; i < loops; ++i) { - api.pedersenCommit([fields[i * 2], fields[i * 2 + 1]], 0); + api.pedersenCommit({ inputs: [fields[i * 2].toBuffer(), fields[i * 2 + 1].toBuffer()], hashIndex: 0 }); } console.log(t.us() / loops); }); diff --git a/barretenberg/ts/src/barretenberg/poseidon.bench.test.ts b/barretenberg/ts/src/barretenberg/poseidon.bench.test.ts new file mode 100644 index 000000000000..c31271222168 --- /dev/null +++ b/barretenberg/ts/src/barretenberg/poseidon.bench.test.ts @@ -0,0 +1,267 @@ +import { Barretenberg, BarretenbergSync, Fr } from '../index.js'; +import { serializeBufferable } from '../serialize/index.js'; +import { BarretenbergWasmMain } from '../barretenberg_wasm/barretenberg_wasm_main/index.js'; +import { fetchModuleAndThreads } from '../barretenberg_wasm/index.js'; +import { BackendType } from './index.js'; + +/** + * Async API benchmark test: WASM vs Native backends with proper non-blocking I/O + * + * This test uses the async Barretenberg API which properly handles: + * - Non-blocking I/O for native backend (event-based) + * - Concurrent operations via promises + * - Better performance for native backend compared to sync API + */ +describe('poseidon2Hash benchmark (Async API): WASM vs Native', () => { + const ITERATIONS = 10000; + const SIZES = [2, 4, 8]; + + let wasmApi: Barretenberg | null = null; + let nativeSocketApi: Barretenberg | null = null; + let nativeShmApi: Barretenberg | null = null; + let nativeShmSyncApi: BarretenbergSync | null = null; + let wasm: BarretenbergWasmMain; + + beforeAll(async () => { + // Setup direct WASM access for baseline benchmark (always required) + wasm = new BarretenbergWasmMain(); + const { module } = await fetchModuleAndThreads(1); + await wasm.init(module, 1); + + // Setup WASM API + try { + wasmApi = await Barretenberg.new({ backend: BackendType.Wasm, threads: 1 }); + } catch (error) { + console.warn('Failed to initialize WASM backend:', error instanceof Error ? 
error.message : String(error)); + } + + // Setup native socket API + try { + nativeSocketApi = await Barretenberg.new({ backend: BackendType.NativeUnixSocket, threads: 1 }); + } catch (error) { + console.warn( + 'Failed to initialize Native Socket backend:', + error instanceof Error ? error.message : String(error), + ); + } + + // Setup native shared memory API (async) + try { + nativeShmApi = await Barretenberg.new({ backend: BackendType.NativeSharedMemory, threads: 1 }); + } catch (error) { + console.warn( + 'Failed to initialize Native Shared Memory (async) backend:', + error instanceof Error ? error.message : String(error), + ); + } + + // Setup native shared memory API (sync) + try { + nativeShmSyncApi = await BarretenbergSync.new({ backend: BackendType.NativeSharedMemory, threads: 1 }); + } catch (error) { + console.warn( + 'Failed to initialize Native Shared Memory (sync) backend:', + error instanceof Error ? error.message : String(error), + ); + } + }, 20000); + + afterAll(async () => { + await wasm.destroy(); + if (wasmApi) { + await wasmApi.destroy(); + } + if (nativeSocketApi) { + await nativeSocketApi.destroy(); + } + if (nativeShmApi) { + await nativeShmApi.destroy(); + } + if (nativeShmSyncApi) { + nativeShmSyncApi.destroy(); + } + }); + + async function directPoseidon2Hash(inputsBuffer: Fr[]): Promise { + const inArgs = [inputsBuffer].map(serializeBufferable); + const outTypes = [Fr]; + const result = wasm.callWasmExport( + 'poseidon2_hash', + inArgs, + outTypes.map(t => t.SIZE_IN_BYTES), + ); + const out = result.map((r, i) => outTypes[i].fromBuffer(r)); + return Promise.resolve(out[0]); + } + + it.each(SIZES)('benchmark with %p field elements', async size => { + // Generate random inputs + const inputs = Array(size) + .fill(0) + .map(() => Fr.random()); + + // Benchmark 1: Direct WASM (baseline - always available) + const directStart = performance.now(); + for (let i = 0; i < ITERATIONS; i++) { + await directPoseidon2Hash(inputs); + } + const directTime = performance.now() - directStart; + + // Benchmark 2: WASM (async) + let wasmTime = 0; + if (wasmApi) { + const wasmStart = performance.now(); + for (let i = 0; i < ITERATIONS; i++) { + await wasmApi.poseidon2Hash({ inputs: inputs.map(fr => fr.toBuffer()) }); + } + wasmTime = performance.now() - wasmStart; + } + + // Benchmark 3: Native Socket (async with non-blocking I/O) + let nativeSocketTime = 0; + if (nativeSocketApi) { + const nativeSocketStart = performance.now(); + for (let i = 0; i < ITERATIONS; i++) { + await nativeSocketApi.poseidon2Hash({ inputs: inputs.map(fr => fr.toBuffer()) }); + } + nativeSocketTime = performance.now() - nativeSocketStart; + } + + // Benchmark 4: Native Shared Memory (async) + let nativeShmTime = 0; + if (nativeShmApi) { + const nativeShmStart = performance.now(); + for (let i = 0; i < ITERATIONS; i++) { + await nativeShmApi.poseidon2Hash({ inputs: inputs.map(fr => fr.toBuffer()) }); + } + nativeShmTime = performance.now() - nativeShmStart; + } + + // Benchmark 5: Native Shared Memory (sync) + let nativeShmSyncTime = 0; + if (nativeShmSyncApi) { + const nativeShmSyncStart = performance.now(); + for (let i = 0; i < ITERATIONS; i++) { + nativeShmSyncApi.poseidon2Hash({ inputs: inputs.map(fr => fr.toBuffer()) }); + } + nativeShmSyncTime = performance.now() - nativeShmSyncStart; + } + + // Calculate metrics (all relative to WASM baseline) + const directOverhead = ((directTime - wasmTime) / wasmTime) * 100; + const nativeSocketOverhead = ((nativeSocketTime - wasmTime) / wasmTime) * 100; + const 
nativeShmOverhead = ((nativeShmTime - wasmTime) / wasmTime) * 100; + const nativeShmSyncOverhead = ((nativeShmSyncTime - wasmTime) / wasmTime) * 100; + + const avgDirectTimeUs = (directTime / ITERATIONS) * 1000; // microseconds + const avgWasmTimeUs = (wasmTime / ITERATIONS) * 1000; + const avgNativeSocketTimeUs = (nativeSocketTime / ITERATIONS) * 1000; + const avgNativeShmTimeUs = (nativeShmTime / ITERATIONS) * 1000; + const avgNativeShmSyncTimeUs = (nativeShmSyncTime / ITERATIONS) * 1000; + + process.stdout.write(`┌─ Size ${size.toString().padStart(3)} field elements ──────────────────────────────────┐\n`); + const formatOverhead = (overhead: number): string => { + const sign = overhead >= 0 ? '+' : '-'; + const value = Math.abs(overhead).toFixed(1).padStart(6); + return `${sign}${value}%`; + }; + + if (wasmApi) { + process.stdout.write( + `│ WASM: ${wasmTime.toFixed(2).padStart(8)}ms (${avgWasmTimeUs.toFixed(2).padStart(7)}µs/call) [baseline] │\n`, + ); + } else { + process.stdout.write(`│ WASM: unavailable │\n`); + } + + process.stdout.write( + `│ Direct WASM: ${directTime.toFixed(2).padStart(8)}ms (${avgDirectTimeUs.toFixed(2).padStart(7)}µs/call) ${formatOverhead(directOverhead)} │\n`, + ); + + if (nativeSocketApi) { + process.stdout.write( + `│ Native Socket: ${nativeSocketTime.toFixed(2).padStart(8)}ms (${avgNativeSocketTimeUs.toFixed(2).padStart(7)}µs/call) ${formatOverhead(nativeSocketOverhead)} │\n`, + ); + } else { + process.stdout.write(`│ Native Socket: unavailable │\n`); + } + + if (nativeShmApi) { + process.stdout.write( + `│ Native Shared: ${nativeShmTime.toFixed(2).padStart(8)}ms (${avgNativeShmTimeUs.toFixed(2).padStart(7)}µs/call) ${formatOverhead(nativeShmOverhead)} │\n`, + ); + } else { + process.stdout.write(`│ Native Shared: unavailable │\n`); + } + + if (nativeShmSyncApi) { + process.stdout.write( + `│ Native Shared Sync: ${nativeShmSyncTime.toFixed(2).padStart(8)}ms (${avgNativeShmSyncTimeUs.toFixed(2).padStart(7)}µs/call) ${formatOverhead(nativeShmSyncOverhead)} │\n`, + ); + } else { + process.stdout.write(`│ Native Shared Sync: unavailable │\n`); + } + + process.stdout.write(`└────────────────────────────────────────────────────────────┘\n`); + + // Sanity check: verify all backends produce same result as direct WASM + const directResult = await directPoseidon2Hash(inputs); + + if (wasmApi) { + const wasmResult = await wasmApi.poseidon2Hash({ inputs: inputs.map(fr => fr.toBuffer()) }); + expect(Buffer.from(wasmResult.hash)).toEqual(directResult.toBuffer()); + } + + if (nativeSocketApi) { + const nativeSocketResult = await nativeSocketApi.poseidon2Hash({ inputs: inputs.map(fr => fr.toBuffer()) }); + expect(Buffer.from(nativeSocketResult.hash)).toEqual(directResult.toBuffer()); + } + + if (nativeShmApi) { + const nativeShmResult = await nativeShmApi.poseidon2Hash({ inputs: inputs.map(fr => fr.toBuffer()) }); + expect(Buffer.from(nativeShmResult.hash)).toEqual(directResult.toBuffer()); + } + + if (nativeShmSyncApi) { + const nativeShmSyncResult = nativeShmSyncApi.poseidon2Hash({ inputs: inputs.map(fr => fr.toBuffer()) }); + expect(Buffer.from(nativeShmSyncResult.hash)).toEqual(directResult.toBuffer()); + } + + // Test always passes, this is just for measuring performance + expect(true).toBe(true); + }); + + const TEST_VECTORS = [1, 2, 3, 5, 10, 50, 100]; + const NUM_RANDOM_TESTS = 10; + + it.each(TEST_VECTORS)('produces identical results for %p field elements', async size => { + // Test with multiple random input vectors + for (let test = 0; test < 
NUM_RANDOM_TESTS; test++) { + const inputs = Array(size) + .fill(0) + .map(() => Fr.random()); + + const directResult = await directPoseidon2Hash(inputs); + + if (wasmApi) { + const wasmResult = await wasmApi.poseidon2Hash({ inputs: inputs.map(fr => fr.toBuffer()) }); + expect(Buffer.from(wasmResult.hash)).toEqual(directResult.toBuffer()); + } + + if (nativeSocketApi) { + const nativeSocketResult = await nativeSocketApi.poseidon2Hash({ inputs: inputs.map(fr => fr.toBuffer()) }); + expect(Buffer.from(nativeSocketResult.hash)).toEqual(directResult.toBuffer()); + } + + if (nativeShmApi) { + const nativeShmResult = await nativeShmApi.poseidon2Hash({ inputs: inputs.map(fr => fr.toBuffer()) }); + expect(Buffer.from(nativeShmResult.hash)).toEqual(directResult.toBuffer()); + } + + if (nativeShmSyncApi) { + const nativeShmSyncResult = nativeShmSyncApi.poseidon2Hash({ inputs: inputs.map(fr => fr.toBuffer()) }); + expect(Buffer.from(nativeShmSyncResult.hash)).toEqual(directResult.toBuffer()); + } + } + }); +}); diff --git a/barretenberg/ts/src/barretenberg/poseidon.test.ts b/barretenberg/ts/src/barretenberg/poseidon.test.ts index b38cfd5b0241..c6a3ce7f8726 100644 --- a/barretenberg/ts/src/barretenberg/poseidon.test.ts +++ b/barretenberg/ts/src/barretenberg/poseidon.test.ts @@ -10,30 +10,30 @@ describe('poseidon sync', () => { }); it('poseidonHash', () => { - const result = api.poseidon2Hash([new Fr(4n), new Fr(8n)]); + const result = api.poseidon2Hash({ inputs: [new Fr(4n).toBuffer(), new Fr(8n).toBuffer()] }); expect(result).toMatchSnapshot(); }); it('poseidonHash perf test', () => { const loops = 1000; - const fields = Array.from({ length: loops * 2 }).map(() => Fr.random()); + const fields = Array.from({ length: loops * 2 }).map(() => Fr.random().toBuffer()); const t = new Timer(); for (let i = 0; i < loops; ++i) { - api.poseidon2Hash([fields[i * 2], fields[i * 2 + 1]]); + api.poseidon2Hash({ inputs: [fields[i * 2], fields[i * 2 + 1]] }); } const us = t.us() / loops; console.log(`Executed ${loops} hashes at an average ${us}us / hash`); }); - it('poseidonHashes perf test', () => { - const loops = 10; - const numHashesPerLoop = 1024; - const fields = Array.from({ length: numHashesPerLoop * 2 }).map(() => Fr.random()); - const t = new Timer(); - for (let i = 0; i < loops; ++i) { - api.poseidon2Hashes(fields); - } - const us = t.us() / (numHashesPerLoop * loops); - console.log(`Executed ${numHashesPerLoop * loops} hashes at an average ${us}us / hash`); - }); + // it('poseidonHashes perf test', () => { + // const loops = 10; + // const numHashesPerLoop = 1024; + // const fields = Array.from({ length: numHashesPerLoop * 2 }).map(() => Fr.random()); + // const t = new Timer(); + // for (let i = 0; i < loops; ++i) { + // api.poseidon2Hashes(fields); + // } + // const us = t.us() / (numHashesPerLoop * loops); + // console.log(`Executed ${numHashesPerLoop * loops} hashes at an average ${us}us / hash`); + // }); }); diff --git a/barretenberg/ts/src/barretenberg_api/index.ts b/barretenberg/ts/src/barretenberg_api/index.ts deleted file mode 100644 index 4b77d1c87b72..000000000000 --- a/barretenberg/ts/src/barretenberg_api/index.ts +++ /dev/null @@ -1,1216 +0,0 @@ -// WARNING: FILE CODE GENERATED BY BINDGEN UTILITY. DO NOT EDIT! 
-/* eslint-disable @typescript-eslint/no-unused-vars */ -import { BarretenbergWasmMain, BarretenbergWasmMainWorker } from '../barretenberg_wasm/barretenberg_wasm_main/index.js'; -import { - BufferDeserializer, - NumberDeserializer, - VectorDeserializer, - BoolDeserializer, - StringDeserializer, - serializeBufferable, - OutputType, -} from '../serialize/index.js'; -import { Fr, Point, Buffer32, Ptr } from '../types/index.js'; - -export class BarretenbergApi { - constructor(protected wasm: BarretenbergWasmMainWorker) {} - - async pedersenCommit(inputsBuffer: Fr[], ctxIndex: number): Promise { - const inArgs = [inputsBuffer, ctxIndex].map(serializeBufferable); - const outTypes: OutputType[] = [Point]; - const result = await this.wasm.callWasmExport( - 'pedersen_commit', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async pedersenHash(inputsBuffer: Fr[], hashIndex: number): Promise { - const inArgs = [inputsBuffer, hashIndex].map(serializeBufferable); - const outTypes: OutputType[] = [Fr]; - const result = await this.wasm.callWasmExport( - 'pedersen_hash', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async pedersenHashes(inputsBuffer: Fr[], hashIndex: number): Promise { - const inArgs = [inputsBuffer, hashIndex].map(serializeBufferable); - const outTypes: OutputType[] = [Fr]; - const result = await this.wasm.callWasmExport( - 'pedersen_hashes', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async pedersenHashBuffer(inputBuffer: Uint8Array, hashIndex: number): Promise { - const inArgs = [inputBuffer, hashIndex].map(serializeBufferable); - const outTypes: OutputType[] = [Fr]; - const result = await this.wasm.callWasmExport( - 'pedersen_hash_buffer', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async poseidon2Hash(inputsBuffer: Fr[]): Promise { - const inArgs = [inputsBuffer].map(serializeBufferable); - const outTypes: OutputType[] = [Fr]; - const result = await this.wasm.callWasmExport( - 'poseidon2_hash', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async poseidon2Hashes(inputsBuffer: Fr[]): Promise { - const inArgs = [inputsBuffer].map(serializeBufferable); - const outTypes: OutputType[] = [Fr]; - const result = await this.wasm.callWasmExport( - 'poseidon2_hashes', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async poseidon2Permutation(inputsBuffer: Fr[]): Promise { - const inArgs = [inputsBuffer].map(serializeBufferable); - const outTypes: OutputType[] = [VectorDeserializer(Fr)]; - const result = await this.wasm.callWasmExport( - 'poseidon2_permutation', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async poseidon2HashAccumulate(inputsBuffer: Fr[]): Promise { - const inArgs = [inputsBuffer].map(serializeBufferable); - const outTypes: OutputType[] = [Fr]; - const result = await this.wasm.callWasmExport( - 'poseidon2_hash_accumulate', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => 
outTypes[i].fromBuffer(r)); - return out[0]; - } - - async blake2s(data: Uint8Array): Promise { - const inArgs = [data].map(serializeBufferable); - const outTypes: OutputType[] = [Buffer32]; - const result = await this.wasm.callWasmExport( - 'blake2s', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async blake2sToField(data: Uint8Array): Promise { - const inArgs = [data].map(serializeBufferable); - const outTypes: OutputType[] = [Fr]; - const result = await this.wasm.callWasmExport( - 'blake2s_to_field_', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async aesEncryptBufferCbc(input: Uint8Array, iv: Uint8Array, key: Uint8Array, length: number): Promise { - const inArgs = [input, iv, key, length].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = await this.wasm.callWasmExport( - 'aes_encrypt_buffer_cbc', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async aesDecryptBufferCbc(input: Uint8Array, iv: Uint8Array, key: Uint8Array, length: number): Promise { - const inArgs = [input, iv, key, length].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = await this.wasm.callWasmExport( - 'aes_decrypt_buffer_cbc', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async srsInitSrs(pointsBuf: Uint8Array, numPoints: number, g2PointBuf: Uint8Array): Promise { - const inArgs = [pointsBuf, numPoints, g2PointBuf].map(serializeBufferable); - const outTypes: OutputType[] = []; - const result = await this.wasm.callWasmExport( - 'srs_init_srs', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return; - } - - async srsInitGrumpkinSrs(pointsBuf: Uint8Array, numPoints: number): Promise { - const inArgs = [pointsBuf, numPoints].map(serializeBufferable); - const outTypes: OutputType[] = []; - const result = await this.wasm.callWasmExport( - 'srs_init_grumpkin_srs', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return; - } - - async testThreads(threads: number, iterations: number): Promise { - const inArgs = [threads, iterations].map(serializeBufferable); - const outTypes: OutputType[] = [NumberDeserializer()]; - const result = await this.wasm.callWasmExport( - 'test_threads', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async commonInitSlabAllocator(circuitSize: number): Promise { - const inArgs = [circuitSize].map(serializeBufferable); - const outTypes: OutputType[] = []; - const result = await this.wasm.callWasmExport( - 'common_init_slab_allocator', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return; - } - - async acirGetCircuitSizes( - constraintSystemBuf: Uint8Array, - recursive: boolean, - honkRecursion: boolean, - ): Promise<[number, number]> { - const inArgs = [constraintSystemBuf, recursive, honkRecursion].map(serializeBufferable); - const outTypes: OutputType[] = [NumberDeserializer(), NumberDeserializer()]; - const result = await 
this.wasm.callWasmExport( - 'acir_get_circuit_sizes', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out as any; - } - - async acirProveAndVerifyUltraHonk(constraintSystemBuf: Uint8Array, witnessBuf: Uint8Array): Promise { - const inArgs = [constraintSystemBuf, witnessBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_prove_and_verify_ultra_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirProveAndVerifyMegaHonk(constraintSystemBuf: Uint8Array, witnessBuf: Uint8Array): Promise { - const inArgs = [constraintSystemBuf, witnessBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_prove_and_verify_mega_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirProveAztecClient(ivcInputsBuf: Uint8Array): Promise<[Uint8Array, Uint8Array]> { - const inArgs = [ivcInputsBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer(), BufferDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_prove_aztec_client', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out as any; - } - - async acirVerifyAztecClient(proofBuf: Uint8Array, vkBuf: Uint8Array): Promise { - const inArgs = [proofBuf, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_verify_aztec_client', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirLoadVerificationKey(acirComposerPtr: Ptr, vkBuf: Uint8Array): Promise { - const inArgs = [acirComposerPtr, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = []; - const result = await this.wasm.callWasmExport( - 'acir_load_verification_key', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return; - } - - async acirInitVerificationKey(acirComposerPtr: Ptr): Promise { - const inArgs = [acirComposerPtr].map(serializeBufferable); - const outTypes: OutputType[] = []; - const result = await this.wasm.callWasmExport( - 'acir_init_verification_key', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return; - } - - async acirGetVerificationKey(acirComposerPtr: Ptr): Promise { - const inArgs = [acirComposerPtr].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_get_verification_key', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirGetProvingKey(acirComposerPtr: Ptr, acirVec: Uint8Array, recursive: boolean): Promise { - const inArgs = [acirComposerPtr, acirVec, recursive].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_get_proving_key', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = 
result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirVerifyProof(acirComposerPtr: Ptr, proofBuf: Uint8Array): Promise { - const inArgs = [acirComposerPtr, proofBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_verify_proof', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirGetSolidityVerifier(acirComposerPtr: Ptr): Promise { - const inArgs = [acirComposerPtr].map(serializeBufferable); - const outTypes: OutputType[] = [StringDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_get_solidity_verifier', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirHonkSolidityVerifier(proofBuf: Uint8Array, vkBuf: Uint8Array): Promise { - const inArgs = [proofBuf, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [StringDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_honk_solidity_verifier', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirSerializeProofIntoFields( - acirComposerPtr: Ptr, - proofBuf: Uint8Array, - numInnerPublicInputs: number, - ): Promise { - const inArgs = [acirComposerPtr, proofBuf, numInnerPublicInputs].map(serializeBufferable); - const outTypes: OutputType[] = [VectorDeserializer(Fr)]; - const result = await this.wasm.callWasmExport( - 'acir_serialize_proof_into_fields', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirSerializeVerificationKeyIntoFields(acirComposerPtr: Ptr): Promise<[Fr[], Fr]> { - const inArgs = [acirComposerPtr].map(serializeBufferable); - const outTypes: OutputType[] = [VectorDeserializer(Fr), Fr]; - const result = await this.wasm.callWasmExport( - 'acir_serialize_verification_key_into_fields', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out as any; - } - - async acirProveUltraZKHonk(acirVec: Uint8Array, witnessVec: Uint8Array, vkBuf: Uint8Array): Promise { - const inArgs = [acirVec, witnessVec, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_prove_ultra_zk_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirProveUltraKeccakHonk(acirVec: Uint8Array, witnessVec: Uint8Array, vkBuf: Uint8Array): Promise { - const inArgs = [acirVec, witnessVec, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_prove_ultra_keccak_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirProveUltraKeccakZkHonk( - acirVec: Uint8Array, - witnessVec: Uint8Array, - vkBuf: Uint8Array, - ): Promise { - const inArgs = [acirVec, witnessVec, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_prove_ultra_keccak_zk_honk', - inArgs, - 
outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirProveUltraStarknetHonk( - acirVec: Uint8Array, - witnessVec: Uint8Array, - vkBuf: Uint8Array, - ): Promise { - const inArgs = [acirVec, witnessVec, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_prove_ultra_starknet_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirProveUltraStarknetZkHonk( - acirVec: Uint8Array, - witnessVec: Uint8Array, - vkBuf: Uint8Array, - ): Promise { - const inArgs = [acirVec, witnessVec, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_prove_ultra_starknet_zk_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirVerifyUltraZKHonk(proofBuf: Uint8Array, vkBuf: Uint8Array): Promise { - const inArgs = [proofBuf, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_verify_ultra_zk_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirVerifyUltraKeccakHonk(proofBuf: Uint8Array, vkBuf: Uint8Array): Promise { - const inArgs = [proofBuf, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_verify_ultra_keccak_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirVerifyUltraKeccakZkHonk(proofBuf: Uint8Array, vkBuf: Uint8Array): Promise { - const inArgs = [proofBuf, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_verify_ultra_keccak_zk_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirVerifyUltraStarknetHonk(proofBuf: Uint8Array, vkBuf: Uint8Array): Promise { - const inArgs = [proofBuf, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_verify_ultra_starknet_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirVerifyUltraStarknetZkHonk(proofBuf: Uint8Array, vkBuf: Uint8Array): Promise { - const inArgs = [proofBuf, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_verify_ultra_starknet_zk_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirWriteVkUltraHonk(acirVec: Uint8Array): Promise { - const inArgs = [acirVec].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_write_vk_ultra_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => 
outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirWriteVkUltraKeccakHonk(acirVec: Uint8Array): Promise { - const inArgs = [acirVec].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_write_vk_ultra_keccak_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirWriteVkUltraKeccakZkHonk(acirVec: Uint8Array): Promise { - const inArgs = [acirVec].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_write_vk_ultra_keccak_zk_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirWriteVkUltraStarknetHonk(acirVec: Uint8Array): Promise { - const inArgs = [acirVec].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_write_vk_ultra_starknet_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirWriteVkUltraStarknetZkHonk(acirVec: Uint8Array): Promise { - const inArgs = [acirVec].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_write_vk_ultra_starknet_zk_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirProofAsFieldsUltraHonk(proofBuf: Uint8Array): Promise { - const inArgs = [proofBuf].map(serializeBufferable); - const outTypes: OutputType[] = [VectorDeserializer(Fr)]; - const result = await this.wasm.callWasmExport( - 'acir_proof_as_fields_ultra_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirVkAsFieldsUltraHonk(vkBuf: Uint8Array): Promise { - const inArgs = [vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [VectorDeserializer(Fr)]; - const result = await this.wasm.callWasmExport( - 'acir_vk_as_fields_ultra_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirVkAsFieldsMegaHonk(vkBuf: Uint8Array): Promise { - const inArgs = [vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [VectorDeserializer(Fr)]; - const result = await this.wasm.callWasmExport( - 'acir_vk_as_fields_mega_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - async acirGatesAztecClient(ivcInputsBuf: Uint8Array): Promise { - const inArgs = [ivcInputsBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = await this.wasm.callWasmExport( - 'acir_gates_aztec_client', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } -} -export class BarretenbergApiSync { - constructor(protected wasm: BarretenbergWasmMain) {} - - pedersenCommit(inputsBuffer: Fr[], ctxIndex: number): Point { - const inArgs = [inputsBuffer, ctxIndex].map(serializeBufferable); - const outTypes: OutputType[] = [Point]; - const result = 
this.wasm.callWasmExport( - 'pedersen_commit', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - pedersenHash(inputsBuffer: Fr[], hashIndex: number): Fr { - const inArgs = [inputsBuffer, hashIndex].map(serializeBufferable); - const outTypes: OutputType[] = [Fr]; - const result = this.wasm.callWasmExport( - 'pedersen_hash', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - pedersenHashes(inputsBuffer: Fr[], hashIndex: number): Fr { - const inArgs = [inputsBuffer, hashIndex].map(serializeBufferable); - const outTypes: OutputType[] = [Fr]; - const result = this.wasm.callWasmExport( - 'pedersen_hashes', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - pedersenHashBuffer(inputBuffer: Uint8Array, hashIndex: number): Fr { - const inArgs = [inputBuffer, hashIndex].map(serializeBufferable); - const outTypes: OutputType[] = [Fr]; - const result = this.wasm.callWasmExport( - 'pedersen_hash_buffer', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - poseidon2Hash(inputsBuffer: Fr[]): Fr { - const inArgs = [inputsBuffer].map(serializeBufferable); - const outTypes: OutputType[] = [Fr]; - const result = this.wasm.callWasmExport( - 'poseidon2_hash', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - poseidon2Hashes(inputsBuffer: Fr[]): Fr { - const inArgs = [inputsBuffer].map(serializeBufferable); - const outTypes: OutputType[] = [Fr]; - const result = this.wasm.callWasmExport( - 'poseidon2_hashes', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - poseidon2Permutation(inputsBuffer: Fr[]): Fr[] { - const inArgs = [inputsBuffer].map(serializeBufferable); - const outTypes: OutputType[] = [VectorDeserializer(Fr)]; - const result = this.wasm.callWasmExport( - 'poseidon2_permutation', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - poseidon2HashAccumulate(inputsBuffer: Fr[]): Fr { - const inArgs = [inputsBuffer].map(serializeBufferable); - const outTypes: OutputType[] = [Fr]; - const result = this.wasm.callWasmExport( - 'poseidon2_hash_accumulate', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - blake2s(data: Uint8Array): Buffer32 { - const inArgs = [data].map(serializeBufferable); - const outTypes: OutputType[] = [Buffer32]; - const result = this.wasm.callWasmExport( - 'blake2s', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - blake2sToField(data: Uint8Array): Fr { - const inArgs = [data].map(serializeBufferable); - const outTypes: OutputType[] = [Fr]; - const result = this.wasm.callWasmExport( - 'blake2s_to_field_', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - aesEncryptBufferCbc(input: Uint8Array, iv: Uint8Array, key: Uint8Array, length: number): Uint8Array { - const inArgs = [input, iv, key, 
length].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = this.wasm.callWasmExport( - 'aes_encrypt_buffer_cbc', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - aesDecryptBufferCbc(input: Uint8Array, iv: Uint8Array, key: Uint8Array, length: number): Uint8Array { - const inArgs = [input, iv, key, length].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = this.wasm.callWasmExport( - 'aes_decrypt_buffer_cbc', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - srsInitSrs(pointsBuf: Uint8Array, numPoints: number, g2PointBuf: Uint8Array): void { - const inArgs = [pointsBuf, numPoints, g2PointBuf].map(serializeBufferable); - const outTypes: OutputType[] = []; - const result = this.wasm.callWasmExport( - 'srs_init_srs', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return; - } - - srsInitGrumpkinSrs(pointsBuf: Uint8Array, numPoints: number): void { - const inArgs = [pointsBuf, numPoints].map(serializeBufferable); - const outTypes: OutputType[] = []; - const result = this.wasm.callWasmExport( - 'srs_init_grumpkin_srs', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return; - } - - testThreads(threads: number, iterations: number): number { - const inArgs = [threads, iterations].map(serializeBufferable); - const outTypes: OutputType[] = [NumberDeserializer()]; - const result = this.wasm.callWasmExport( - 'test_threads', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - commonInitSlabAllocator(circuitSize: number): void { - const inArgs = [circuitSize].map(serializeBufferable); - const outTypes: OutputType[] = []; - const result = this.wasm.callWasmExport( - 'common_init_slab_allocator', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return; - } - - acirGetCircuitSizes(constraintSystemBuf: Uint8Array, recursive: boolean, honkRecursion: boolean): [number, number] { - const inArgs = [constraintSystemBuf, recursive, honkRecursion].map(serializeBufferable); - const outTypes: OutputType[] = [NumberDeserializer(), NumberDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_get_circuit_sizes', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out as any; - } - - acirProveAndVerifyUltraHonk(constraintSystemBuf: Uint8Array, witnessBuf: Uint8Array): boolean { - const inArgs = [constraintSystemBuf, witnessBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_prove_and_verify_ultra_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirProveAndVerifyMegaHonk(constraintSystemBuf: Uint8Array, witnessBuf: Uint8Array): boolean { - const inArgs = [constraintSystemBuf, witnessBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_prove_and_verify_mega_honk', - inArgs, - outTypes.map(t => 
t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirProveAztecClient(ivcInputsBuf: Uint8Array): [Uint8Array, Uint8Array] { - const inArgs = [ivcInputsBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer(), BufferDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_prove_aztec_client', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out as any; - } - - acirVerifyAztecClient(proofBuf: Uint8Array, vkBuf: Uint8Array): boolean { - const inArgs = [proofBuf, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_verify_aztec_client', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirLoadVerificationKey(acirComposerPtr: Ptr, vkBuf: Uint8Array): void { - const inArgs = [acirComposerPtr, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = []; - const result = this.wasm.callWasmExport( - 'acir_load_verification_key', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return; - } - - acirInitVerificationKey(acirComposerPtr: Ptr): void { - const inArgs = [acirComposerPtr].map(serializeBufferable); - const outTypes: OutputType[] = []; - const result = this.wasm.callWasmExport( - 'acir_init_verification_key', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return; - } - - acirGetVerificationKey(acirComposerPtr: Ptr): Uint8Array { - const inArgs = [acirComposerPtr].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_get_verification_key', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirGetProvingKey(acirComposerPtr: Ptr, acirVec: Uint8Array, recursive: boolean): Uint8Array { - const inArgs = [acirComposerPtr, acirVec, recursive].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_get_proving_key', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirVerifyProof(acirComposerPtr: Ptr, proofBuf: Uint8Array): boolean { - const inArgs = [acirComposerPtr, proofBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_verify_proof', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirGetSolidityVerifier(acirComposerPtr: Ptr): string { - const inArgs = [acirComposerPtr].map(serializeBufferable); - const outTypes: OutputType[] = [StringDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_get_solidity_verifier', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirHonkSolidityVerifier(proofBuf: Uint8Array, vkBuf: Uint8Array): string { - const inArgs = [proofBuf, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [StringDeserializer()]; - const result = 
this.wasm.callWasmExport( - 'acir_honk_solidity_verifier', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirSerializeProofIntoFields(acirComposerPtr: Ptr, proofBuf: Uint8Array, numInnerPublicInputs: number): Fr[] { - const inArgs = [acirComposerPtr, proofBuf, numInnerPublicInputs].map(serializeBufferable); - const outTypes: OutputType[] = [VectorDeserializer(Fr)]; - const result = this.wasm.callWasmExport( - 'acir_serialize_proof_into_fields', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirSerializeVerificationKeyIntoFields(acirComposerPtr: Ptr): [Fr[], Fr] { - const inArgs = [acirComposerPtr].map(serializeBufferable); - const outTypes: OutputType[] = [VectorDeserializer(Fr), Fr]; - const result = this.wasm.callWasmExport( - 'acir_serialize_verification_key_into_fields', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out as any; - } - - acirProveUltraHonk(acirVec: Uint8Array, witnessVec: Uint8Array): Uint8Array { - const inArgs = [acirVec, witnessVec].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_prove_ultra_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirProveUltraKeccakHonk(acirVec: Uint8Array, witnessVec: Uint8Array): Uint8Array { - const inArgs = [acirVec, witnessVec].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_prove_ultra_keccak_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirProveUltraKeccakZkHonk(acirVec: Uint8Array, witnessVec: Uint8Array): Uint8Array { - const inArgs = [acirVec, witnessVec].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_prove_ultra_keccak_zk_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirProveUltraStarknetHonk(acirVec: Uint8Array, witnessVec: Uint8Array): Uint8Array { - const inArgs = [acirVec, witnessVec].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_prove_ultra_starknet_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirProveUltraStarknetZkHonk(acirVec: Uint8Array, witnessVec: Uint8Array): Uint8Array { - const inArgs = [acirVec, witnessVec].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_prove_ultra_starknet_zk_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirVerifyUltraHonk(proofBuf: Uint8Array, vkBuf: Uint8Array): boolean { - const inArgs = [proofBuf, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_verify_ultra_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - 
); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirVerifyUltraKeccakHonk(proofBuf: Uint8Array, vkBuf: Uint8Array): boolean { - const inArgs = [proofBuf, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_verify_ultra_keccak_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirVerifyUltraKeccakZkHonk(proofBuf: Uint8Array, vkBuf: Uint8Array): boolean { - const inArgs = [proofBuf, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_verify_ultra_keccak_zk_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirVerifyUltraStarknetHonk(proofBuf: Uint8Array, vkBuf: Uint8Array): boolean { - const inArgs = [proofBuf, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_verify_ultra_starknet_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirVerifyUltraStarknetZkHonk(proofBuf: Uint8Array, vkBuf: Uint8Array): boolean { - const inArgs = [proofBuf, vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BoolDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_verify_ultra_starknet_zk_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirWriteVkUltraHonk(acirVec: Uint8Array): Uint8Array { - const inArgs = [acirVec].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_write_vk_ultra_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirWriteVkUltraKeccakHonk(acirVec: Uint8Array): Uint8Array { - const inArgs = [acirVec].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_write_vk_ultra_keccak_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirWriteVkUltraKeccakZkHonk(acirVec: Uint8Array): Uint8Array { - const inArgs = [acirVec].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_write_vk_ultra_keccak_zk_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirWriteVkUltraStarknetHonk(acirVec: Uint8Array): Uint8Array { - const inArgs = [acirVec].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_write_vk_ultra_starknet_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirWriteVkUltraStarknetZkHonk(acirVec: Uint8Array): Uint8Array { - const inArgs = [acirVec].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = this.wasm.callWasmExport( - 
'acir_write_vk_ultra_starknet_zk_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirProofAsFieldsUltraHonk(proofBuf: Uint8Array): Fr[] { - const inArgs = [proofBuf].map(serializeBufferable); - const outTypes: OutputType[] = [VectorDeserializer(Fr)]; - const result = this.wasm.callWasmExport( - 'acir_proof_as_fields_ultra_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirVkAsFieldsUltraHonk(vkBuf: Uint8Array): Fr[] { - const inArgs = [vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [VectorDeserializer(Fr)]; - const result = this.wasm.callWasmExport( - 'acir_vk_as_fields_ultra_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirVkAsFieldsMegaHonk(vkBuf: Uint8Array): Fr[] { - const inArgs = [vkBuf].map(serializeBufferable); - const outTypes: OutputType[] = [VectorDeserializer(Fr)]; - const result = this.wasm.callWasmExport( - 'acir_vk_as_fields_mega_honk', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } - - acirGatesAztecClient(ivcInputsBuf: Uint8Array): Uint8Array { - const inArgs = [ivcInputsBuf].map(serializeBufferable); - const outTypes: OutputType[] = [BufferDeserializer()]; - const result = this.wasm.callWasmExport( - 'acir_gates_aztec_client', - inArgs, - outTypes.map(t => t.SIZE_IN_BYTES), - ); - const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out[0]; - } -} diff --git a/barretenberg/ts/src/barretenberg_wasm/barretenberg_wasm_base/index.ts b/barretenberg/ts/src/barretenberg_wasm/barretenberg_wasm_base/index.ts index 9bfa35e462b9..c6f11d6f22b0 100644 --- a/barretenberg/ts/src/barretenberg_wasm/barretenberg_wasm_base/index.ts +++ b/barretenberg/ts/src/barretenberg_wasm/barretenberg_wasm_base/index.ts @@ -121,12 +121,12 @@ export class BarretenbergWasmBase { mem.set(arr, offset); } - // PRIVATE METHODS - - private getMemory() { + public getMemory() { return new Uint8Array(this.memory.buffer); } + // PRIVATE METHODS + private stringFromAddress(addr: number) { addr = addr >>> 0; const m = this.getMemory(); diff --git a/barretenberg/ts/src/barretenberg_wasm/barretenberg_wasm_main/heap_allocator.ts b/barretenberg/ts/src/barretenberg_wasm/barretenberg_wasm_main/heap_allocator.ts index 14f7cfbdcb66..0c4af764e50d 100644 --- a/barretenberg/ts/src/barretenberg_wasm/barretenberg_wasm_main/heap_allocator.ts +++ b/barretenberg/ts/src/barretenberg_wasm/barretenberg_wasm_main/heap_allocator.ts @@ -5,25 +5,29 @@ import { type BarretenbergWasmMain } from './index.js'; * The WASM memory layout has 1024 bytes of unused "scratch" space at the start (addresses 0-1023). * We can leverage this for IO rather than making expensive bb_malloc bb_free calls. * Heap allocations will be created for input/output args that don't fit into the scratch space. - * Input and output args can use the same scratch space as it's assume all input reads will be performed before any - * output writes are performed. + * Input scratch grows UP from 0, output scratch grows DOWN from 1024, meeting in the middle. + * This maximizes space utilization while preventing overlap. 
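Editorial aside, not part of the patch: a minimal, self-contained model of the grow-up/grow-down scratch bookkeeping described just above, using the same 1024-byte region. The helper names and example sizes are hypothetical; the real HeapAllocator falls back to bbmalloc when the two pointers would cross.

```typescript
// Standalone model of the two-pointer scratch scheme (hypothetical helpers).
// Inputs are placed from offset 0 upwards, outputs from offset 1024 downwards;
// when the pointers would cross, the real allocator falls back to bbmalloc.
const SCRATCH_BYTES = 1024;
let inScratchPtr = 0;
let outScratchPtr = SCRATCH_BYTES;

function placeInput(size: number): number | null {
  if (inScratchPtr + size > outScratchPtr) return null; // would overlap -> heap fallback
  const ptr = inScratchPtr;
  inScratchPtr += size;
  return ptr;
}

function placeOutput(size: number): number | null {
  if (inScratchPtr + size > outScratchPtr) return null; // would overlap -> heap fallback
  outScratchPtr -= size;
  return outScratchPtr;
}

// Two 300-byte inputs land at offsets 0 and 300; a 200-byte output lands at 824.
console.log(placeInput(300), placeInput(300), placeOutput(200));
```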
*/ export class HeapAllocator { private allocs: number[] = []; - private inScratchRemaining = 1024; - private outScratchRemaining = 1024; + private inScratchPtr = 0; // Next input starts here, grows UP + private outScratchPtr = 1024; // Next output ends here, grows DOWN constructor(private wasm: BarretenbergWasmMain) {} getInputs(buffers: (Uint8Array | number)[]) { return buffers.map(bufOrNum => { if (typeof bufOrNum === 'object') { - if (bufOrNum.length <= this.inScratchRemaining) { - const ptr = (this.inScratchRemaining -= bufOrNum.length); + const size = bufOrNum.length; + // Check if there's room in scratch space (inputs grow up, outputs grow down) + if (this.inScratchPtr + size <= this.outScratchPtr) { + const ptr = this.inScratchPtr; + this.inScratchPtr += size; // Grow UP this.wasm.writeMemory(ptr, bufOrNum); return ptr; } else { - const ptr = this.wasm.call('bbmalloc', bufOrNum.length); + // Fall back to heap allocation + const ptr = this.wasm.call('bbmalloc', size); this.wasm.writeMemory(ptr, bufOrNum); this.allocs.push(ptr); return ptr; @@ -40,9 +44,12 @@ export class HeapAllocator { // WARNING: 4 only works with WASM as it has 32 bit memory. const size = len || 4; - if (size <= this.outScratchRemaining) { - return (this.outScratchRemaining -= size); + // Check if there's room in scratch space (inputs grow up, outputs grow down) + if (this.inScratchPtr + size <= this.outScratchPtr) { + this.outScratchPtr -= size; // Grow DOWN + return this.outScratchPtr; } else { + // Fall back to heap allocation const ptr = this.wasm.call('bbmalloc', size); this.allocs.push(ptr); return ptr; @@ -51,6 +58,7 @@ export class HeapAllocator { } addOutputPtr(ptr: number) { + // Only add to dealloc list if it's a heap allocation (not in scratch space 0-1023) if (ptr >= 1024) { this.allocs.push(ptr); } diff --git a/barretenberg/ts/src/barretenberg_wasm/barretenberg_wasm_main/index.ts b/barretenberg/ts/src/barretenberg_wasm/barretenberg_wasm_main/index.ts index 6c818a99052b..21e027fd540e 100644 --- a/barretenberg/ts/src/barretenberg_wasm/barretenberg_wasm_main/index.ts +++ b/barretenberg/ts/src/barretenberg_wasm/barretenberg_wasm_main/index.ts @@ -19,6 +19,11 @@ export class BarretenbergWasmMain extends BarretenbergWasmBase { private nextWorker = 0; private nextThreadId = 1; + // Pre-allocated scratch buffers for msgpack I/O to avoid malloc/free overhead + private msgpackInputScratch: number = 0; // 8MB input buffer + private msgpackOutputScratch: number = 0; // 8MB output buffer + private readonly MSGPACK_SCRATCH_SIZE = 1024 * 1024 * 8; // 8MB + public getNumThreads() { return this.workers.length + 1; } @@ -54,6 +59,14 @@ export class BarretenbergWasmMain extends BarretenbergWasmBase { // Init all global/static data. this.call('_initialize'); + // Allocate dedicated msgpack scratch buffers (never freed, reused for all msgpack calls) + this.msgpackInputScratch = this.call('bbmalloc', this.MSGPACK_SCRATCH_SIZE); + this.msgpackOutputScratch = this.call('bbmalloc', this.MSGPACK_SCRATCH_SIZE); + this.logger( + `Allocated msgpack scratch buffers: ` + + `input @ ${this.msgpackInputScratch}, output @ ${this.msgpackOutputScratch} (${this.MSGPACK_SCRATCH_SIZE} bytes each)`, + ); + // Create worker threads. Create 1 less than requested, as main thread counts as a thread. 
if (threads > 1) { this.logger(`Creating ${threads} worker threads`); @@ -138,25 +151,59 @@ export class BarretenbergWasmMain extends BarretenbergWasmBase { } cbindCall(cbind: string, inputBuffer: Uint8Array): any { - const outputSizePtr = this.call('bbmalloc', 4); - const outputMsgpackPtr = this.call('bbmalloc', 4); + const needsCustomInputBuffer = inputBuffer.length > this.MSGPACK_SCRATCH_SIZE; + let inputPtr: number; + + if (needsCustomInputBuffer) { + // Allocate temporary buffer for oversized input + inputPtr = this.call('bbmalloc', inputBuffer.length); + } else { + // Use pre-allocated scratch buffer + inputPtr = this.msgpackInputScratch; + } - const inputPtr = this.call('bbmalloc', inputBuffer.length); + // Write input to buffer this.writeMemory(inputPtr, inputBuffer); - this.call(cbind, inputPtr, inputBuffer.length, outputMsgpackPtr, outputSizePtr); - const readPtr32 = (ptr32: number) => { - const dataView = new DataView(this.getMemorySlice(ptr32, ptr32 + 4).buffer); - return dataView.getUint32(0, true); - }; + // Setup output scratch buffer with IN-OUT parameter pattern: + // Reserve 8 bytes for metadata (pointer + size), rest is scratch data space + const METADATA_SIZE = 8; + const outputPtrLocation = this.msgpackOutputScratch; + const outputSizeLocation = this.msgpackOutputScratch + 4; + const scratchDataPtr = this.msgpackOutputScratch + METADATA_SIZE; + const scratchDataSize = this.MSGPACK_SCRATCH_SIZE - METADATA_SIZE; + + // Get memory once and create DataView for all reads/writes (avoids creating multiple typed arrays) + const mem = this.getMemory(); + const view = new DataView(mem.buffer); + + // Write IN values: provide scratch buffer pointer and size to C++ + view.setUint32(outputPtrLocation, scratchDataPtr, true); + view.setUint32(outputSizeLocation, scratchDataSize, true); + + // Call WASM + this.call(cbind, inputPtr, inputBuffer.length, outputPtrLocation, outputSizeLocation); + + // Free custom input buffer if allocated + if (needsCustomInputBuffer) { + this.call('bbfree', inputPtr); + } + + // Read OUT values: C++ returns actual buffer pointer and size + const outputDataPtr = view.getUint32(outputPtrLocation, true); + const outputSize = view.getUint32(outputSizeLocation, true); + + // Check if C++ used scratch (pointer unchanged) or allocated (pointer changed) + const usedScratch = outputDataPtr === scratchDataPtr; + + // Copy output data from WASM memory + const encodedResult = this.getMemorySlice(outputDataPtr, outputDataPtr + outputSize); + + // Only free if C++ allocated beyond scratch + if (!usedScratch) { + this.call('bbfree', outputDataPtr); + } - const encodedResult = this.getMemorySlice( - readPtr32(outputMsgpackPtr), - readPtr32(outputMsgpackPtr) + readPtr32(outputSizePtr), - ); - this.call('bbfree', inputPtr); - this.call('bbfree', outputSizePtr); - this.call('bbfree', outputMsgpackPtr); return encodedResult; } } diff --git a/barretenberg/ts/src/barretenberg_wasm/index.test.ts b/barretenberg/ts/src/barretenberg_wasm/index.test.ts index 427640861a1c..5143bc4d5e55 100644 --- a/barretenberg/ts/src/barretenberg_wasm/index.test.ts +++ b/barretenberg/ts/src/barretenberg_wasm/index.test.ts @@ -1,17 +1,23 @@ -import { BarretenbergWasmMain, BarretenbergWasmMainWorker } from './barretenberg_wasm_main/index.js'; -import { Barretenberg } from '../index.js'; +import { createMainWorker } from '../barretenberg_wasm/barretenberg_wasm_main/factory/node/index.js'; +import { BarretenbergWasmMainWorker } from '../barretenberg_wasm/barretenberg_wasm_main/index.js'; +import { 
getRemoteBarretenbergWasm } from '../barretenberg_wasm/helpers/index.js'; +import { fetchModuleAndThreads } from '../barretenberg_wasm/index.js'; +import { Worker } from 'worker_threads'; describe('barretenberg wasm', () => { - let api: Barretenberg; let wasm: BarretenbergWasmMainWorker; + let worker: Worker; beforeAll(async () => { - api = await Barretenberg.new({ threads: 2 }); - wasm = api.getWasm(); + worker = await createMainWorker(); + wasm = getRemoteBarretenbergWasm(worker); + const { module, threads } = await fetchModuleAndThreads(2); + await wasm.init(module, threads); }, 20000); afterAll(async () => { - await api.destroy(); + await wasm.destroy(); + await worker.terminate(); }); it('should new malloc, transfer and slice mem', async () => { @@ -28,11 +34,6 @@ describe('barretenberg wasm', () => { await expect(() => wasm.call('test_abort')).rejects.toThrow(); }); - it('test c/c++ stdout/stderr', async () => { - // We're checking we don't crash, but you can manually confirm you see log lines handled by logstr. - await wasm.call('test_stdout_stderr'); - }); - it('should new malloc, transfer and slice mem', async () => { const length = 1024; const ptr = await wasm.call('bbmalloc', length); diff --git a/barretenberg/ts/src/bb_backends/browser/index.ts b/barretenberg/ts/src/bb_backends/browser/index.ts new file mode 100644 index 000000000000..33374de9fc9d --- /dev/null +++ b/barretenberg/ts/src/bb_backends/browser/index.ts @@ -0,0 +1,50 @@ +import { BarretenbergWasmSyncBackend, BarretenbergWasmAsyncBackend } from '../wasm.js'; +import { Barretenberg, BarretenbergSync } from '../../barretenberg/index.js'; +import { BackendOptions, BackendType } from '../index.js'; + +/** + * Create backend of specific type (no fallback) + */ +export async function createAsyncBackend( + type: BackendType, + options: BackendOptions, + logger: (msg: string) => void, +): Promise { + switch (type) { + case BackendType.Wasm: + case BackendType.WasmWorker: { + const useWorker = type === BackendType.WasmWorker; + logger(`Using WASM backend (worker: ${useWorker})`); + const wasm = await BarretenbergWasmAsyncBackend.new({ + threads: options.threads, + wasmPath: options.wasmPath, + logger, + memory: options.memory, + useWorker, + }); + return new Barretenberg(wasm, options); + } + + default: + throw new Error(`Unknown backend type: ${type}`); + } +} + +/** + * Create backend of specific type (no fallback) + */ +export async function createSyncBackend( + type: BackendType, + options: BackendOptions, + logger: (msg: string) => void, +): Promise { + switch (type) { + case BackendType.Wasm: + logger('Using WASM backend'); + const wasm = await BarretenbergWasmSyncBackend.new(options.wasmPath, logger); + return new BarretenbergSync(wasm); + + default: + throw new Error(`Backend ${type} not supported for BarretenbergSync`); + } +} diff --git a/barretenberg/ts/src/bb_backends/index.ts b/barretenberg/ts/src/bb_backends/index.ts new file mode 100644 index 000000000000..6ced95d7053b --- /dev/null +++ b/barretenberg/ts/src/bb_backends/index.ts @@ -0,0 +1,49 @@ +/** + * Backend types for Barretenberg + */ +export enum BackendType { + /** WASM direct execution (no worker) */ + Wasm = 'wasm', + /** WASM with worker threads */ + WasmWorker = 'wasm-worker', + /** Native via Unix domain socket (async only) */ + NativeUnixSocket = 'native-unix-socket', + /** Native via shared memory (sync only currently) */ + NativeSharedMemory = 'native-shared-mem', +} + +export type BackendOptions = { + /** @description Number of threads to run 
the backend worker on */ + threads?: number; + + /** @description Initial and Maximum memory to be allotted to the backend worker */ + memory?: { initial?: number; maximum?: number }; + + /** @description Path to download CRS files */ + crsPath?: string; + + /** @description Path to download WASM files */ + wasmPath?: string; + + /** @description Custom path to bb binary for native backend (overrides automatic detection) */ + bbPath?: string; + + /** @description Logging function */ + logger?: (msg: string) => void; + + /** + * @description Maximum concurrent clients for shared memory IPC server (default: 1) + * Only applies to NativeSharedMemory backend + */ + maxClients?: number; + + /** + * @description Specify exact backend to use + * - If unset: tries backends in default order with fallback + * - If set: must succeed with specified backend or throw error (no fallback) + * + * Barretenberg (async) supports: all types + * BarretenbergSync supports: Wasm, NativeSharedMemory only + */ + backend?: BackendType; +}; diff --git a/barretenberg/ts/src/bb_backends/interface.ts b/barretenberg/ts/src/bb_backends/interface.ts new file mode 100644 index 000000000000..fe2e54a84f95 --- /dev/null +++ b/barretenberg/ts/src/bb_backends/interface.ts @@ -0,0 +1,35 @@ +/** + * Generic interface for msgpack backend implementations. + * Both WASM and native binary backends implement this interface. + */ +export interface IMsgpackBackend { + /** + * Execute a msgpack command and return the msgpack response. + * @param inputBuffer The msgpack-encoded input buffer + * @returns The msgpack-encoded response buffer (sync or async) + */ + call(inputBuffer: Uint8Array): Uint8Array | Promise<Uint8Array>; + + /** + * Clean up resources. + */ + destroy(): void | Promise<void>; +} + +/** + * Synchronous variant of IMsgpackBackend. + * Used by BarretenbergSync and SyncApi. + */ +export interface IMsgpackBackendSync extends IMsgpackBackend { + call(inputBuffer: Uint8Array): Uint8Array; + destroy(): void; +} + +/** + * Asynchronous variant of IMsgpackBackend. + * Used by Barretenberg and AsyncApi.
+ */ +export interface IMsgpackBackendAsync extends IMsgpackBackend { + call(inputBuffer: Uint8Array): Promise; + destroy(): Promise; +} diff --git a/barretenberg/ts/src/bb_backends/node/index.ts b/barretenberg/ts/src/bb_backends/node/index.ts new file mode 100644 index 000000000000..4ca409bb914a --- /dev/null +++ b/barretenberg/ts/src/bb_backends/node/index.ts @@ -0,0 +1,94 @@ +import { BarretenbergNativeSocketAsyncBackend } from './native_socket.js'; +import { BarretenbergWasmSyncBackend, BarretenbergWasmAsyncBackend } from '../wasm.js'; +import { BarretenbergNativeShmSyncBackend } from './native_shm.js'; +import { SyncToAsyncAdapter } from '../sync_to_async_adapter.js'; +import { findBbBinary, findNapiBinary } from './platform.js'; +import { Barretenberg, BarretenbergSync } from '../../barretenberg/index.js'; +import { BackendOptions, BackendType } from '../index.js'; + +/** + * Create backend of specific type (no fallback) + */ +export async function createAsyncBackend( + type: BackendType, + options: BackendOptions, + logger: (msg: string) => void, +): Promise { + switch (type) { + case BackendType.NativeUnixSocket: { + const bbPath = findBbBinary(options.bbPath); + if (!bbPath) { + throw new Error('Native backend requires bb binary.'); + } + logger(`Using native Unix socket backend: ${bbPath}`); + const socket = new BarretenbergNativeSocketAsyncBackend(bbPath, options.threads); + return new Barretenberg(socket, options); + } + + case BackendType.NativeSharedMemory: { + const bbPath = findBbBinary(options.bbPath); + if (!bbPath) { + throw new Error('Native backend requires bb binary.'); + } + const napiPath = findNapiBinary(); + if (!napiPath) { + throw new Error('Native sync backend requires napi client stub.'); + } + logger(`Using native shared memory backend (via sync adapter): ${bbPath}`); + // Use sync backend with adapter to provide async interface + const syncBackend = await BarretenbergNativeShmSyncBackend.new(bbPath, options.threads, options.maxClients); + const asyncBackend = new SyncToAsyncAdapter(syncBackend); + return new Barretenberg(asyncBackend, options); + } + + case BackendType.Wasm: + case BackendType.WasmWorker: { + const useWorker = type === BackendType.WasmWorker; + logger(`Using WASM backend (worker: ${useWorker})`); + const wasm = await BarretenbergWasmAsyncBackend.new({ + threads: options.threads, + wasmPath: options.wasmPath, + logger: options.logger, + memory: options.memory, + useWorker, + }); + return new Barretenberg(wasm, options); + } + + default: + throw new Error(`Unknown backend type: ${type}`); + } +} + +/** + * Create backend of specific type (no fallback) + */ +export async function createSyncBackend( + type: BackendType, + options: BackendOptions, + logger: (msg: string) => void, +): Promise { + switch (type) { + case BackendType.NativeSharedMemory: { + const bbPath = findBbBinary(options.bbPath); + if (!bbPath) { + throw new Error('Native backend requires bb binary.'); + } + const napiPath = findNapiBinary(); + if (!napiPath) { + throw new Error('Native sync backend requires napi client stub.'); + } + logger(`Using native shared memory backend: ${bbPath}`); + const shm = await BarretenbergNativeShmSyncBackend.new(bbPath, options.threads, options.maxClients); + return new BarretenbergSync(shm); + } + + case BackendType.Wasm: + logger('Using WASM backend'); + const wasm = await BarretenbergWasmSyncBackend.new(options.wasmPath, logger); + return new BarretenbergSync(wasm); + + default: + throw new Error(`Backend ${type} not supported for 
BarretenbergSync`); + } +} diff --git a/barretenberg/ts/src/bb_backends/node/native_pipe.ts b/barretenberg/ts/src/bb_backends/node/native_pipe.ts new file mode 100644 index 000000000000..1b9e2a3e50d4 --- /dev/null +++ b/barretenberg/ts/src/bb_backends/node/native_pipe.ts @@ -0,0 +1,127 @@ +import { spawn, ChildProcess } from 'child_process'; +import * as fs from 'fs'; +import { IMsgpackBackendSync, IMsgpackBackendAsync } from '../interface.js'; + +/** + * Asynchronous native backend that communicates with bb binary via stdin/stdout. + * Uses event-based I/O with a state machine to handle partial reads. + * + * Protocol: + * - Request: 4-byte little-endian length + msgpack buffer + * - Response: 4-byte little-endian length + msgpack buffer + */ +export class BarretenbergNativePipeAsyncBackend implements IMsgpackBackendAsync { + private process: ChildProcess; + private pendingResolve: ((data: Uint8Array) => void) | null = null; + private pendingReject: ((error: Error) => void) | null = null; + + // State machine for reading responses + private readingLength: boolean = true; + private lengthBuffer: Buffer = Buffer.alloc(4); + private lengthBytesRead: number = 0; + private responseLength: number = 0; + private responseBuffer: Buffer | null = null; + private responseBytesRead: number = 0; + + constructor(bbBinaryPath: string) { + this.process = spawn(bbBinaryPath, ['msgpack', 'run'], { + stdio: ['pipe', 'pipe', 'inherit'], + }); + + this.process.stdout!.on('data', (chunk: Buffer) => { + this.handleData(chunk); + }); + + this.process.on('error', err => { + if (this.pendingReject) { + this.pendingReject(new Error(`Native backend process error: ${err.message}`)); + this.pendingReject = null; + this.pendingResolve = null; + } + }); + + this.process.on('exit', (code, signal) => { + if (this.pendingReject) { + if (code !== null && code !== 0) { + this.pendingReject(new Error(`Native backend process exited with code ${code}`)); + } else if (signal) { + if (signal != 'SIGTERM') { + this.pendingReject(new Error(`Native backend process killed with signal ${signal}`)); + } + } else { + this.pendingReject(new Error('Native backend process exited unexpectedly')); + } + this.pendingReject = null; + this.pendingResolve = null; + } + }); + } + + private handleData(chunk: Buffer): void { + let offset = 0; + + while (offset < chunk.length) { + if (this.readingLength) { + // Reading 4-byte length prefix + const bytesToCopy = Math.min(4 - this.lengthBytesRead, chunk.length - offset); + chunk.copy(this.lengthBuffer, this.lengthBytesRead, offset, offset + bytesToCopy); + this.lengthBytesRead += bytesToCopy; + offset += bytesToCopy; + + if (this.lengthBytesRead === 4) { + // Length is complete, switch to reading data + this.responseLength = this.lengthBuffer.readUInt32LE(0); + this.responseBuffer = Buffer.alloc(this.responseLength); + this.responseBytesRead = 0; + this.readingLength = false; + } + } else { + // Reading response data + const bytesToCopy = Math.min(this.responseLength - this.responseBytesRead, chunk.length - offset); + chunk.copy(this.responseBuffer!, this.responseBytesRead, offset, offset + bytesToCopy); + this.responseBytesRead += bytesToCopy; + offset += bytesToCopy; + + if (this.responseBytesRead === this.responseLength) { + // Response is complete + if (this.pendingResolve) { + this.pendingResolve(new Uint8Array(this.responseBuffer!)); + this.pendingResolve = null; + this.pendingReject = null; + } + + // Reset state for next message + this.readingLength = true; + this.lengthBytesRead = 0; + 
this.responseLength = 0; + this.responseBuffer = null; + this.responseBytesRead = 0; + } + } + } + } + + async call(inputBuffer: Uint8Array): Promise { + if (this.pendingResolve) { + throw new Error('Cannot call while another call is pending (no pipelining supported)'); + } + + return new Promise((resolve, reject) => { + this.pendingResolve = resolve; + this.pendingReject = reject; + + // Write request: 4-byte little-endian length + msgpack data + const lengthBuf = Buffer.alloc(4); + lengthBuf.writeUInt32LE(inputBuffer.length, 0); + this.process.stdin!.write(lengthBuf); + this.process.stdin!.write(inputBuffer); + }); + } + + async destroy(): Promise { + this.process.kill(); + return new Promise(resolve => { + this.process.once('exit', () => resolve()); + }); + } +} diff --git a/barretenberg/ts/src/bb_backends/node/native_shm.ts b/barretenberg/ts/src/bb_backends/node/native_shm.ts new file mode 100644 index 000000000000..71e0eaade7ea --- /dev/null +++ b/barretenberg/ts/src/bb_backends/node/native_shm.ts @@ -0,0 +1,171 @@ +import { createRequire } from 'module'; +import { spawn, ChildProcess } from 'child_process'; +import { IMsgpackBackendSync } from '../interface.js'; +import { findNapiBinary, findPackageRoot } from './platform.js'; + +// Import the NAPI module +// The addon is built to the nodejs_module directory +const addonPath = findNapiBinary(); +// Try loading, but don't throw if it doesn't exist (will be caught in constructor) +let addon: any = null; +try { + if (addonPath) { + const require = createRequire(findPackageRoot()!); + addon = require(addonPath); + } +} catch (err) { + // Addon not built yet or not available + addon = null; +} + +/** + * Synchronous shared memory backend that communicates with bb binary via shared memory. + * Uses NAPI module to interface with shared memory IPC. + * + * Architecture: bb acts as the SERVER, TypeScript is the CLIENT + * - bb creates the shared memory region + * - TypeScript connects via NAPI wrapper + * + * Protocol: + * - Handled internally by IpcClient (no manual length prefixes needed) + */ +export class BarretenbergNativeShmSyncBackend implements IMsgpackBackendSync { + private process: ChildProcess; + private client: any; // NAPI MsgpackClient instance + + private constructor(process: ChildProcess, client: any) { + this.process = process; + this.client = client; + } + + /** + * Create and initialize a shared memory backend. + * @param bbBinaryPath Path to bb binary + * @param threads Optional number of threads + * @param maxClients Optional maximum concurrent clients (default: 1) + */ + static async new( + bbBinaryPath: string, + threads?: number, + maxClients?: number, + ): Promise { + if (!addon || !addon.MsgpackClient) { + throw new Error('Shared memory NAPI not available.'); + } + + // Create a unique shared memory name + const shmName = `bb-${process.pid}-${Date.now()}`; + + // Default maxClients to 1 if not specified + const clientCount = maxClients ?? 1; + + // Set HARDWARE_CONCURRENCY if threads specified + const env = threads !== undefined ? { ...process.env, HARDWARE_CONCURRENCY: threads.toString() } : process.env; + + // Spawn bb process with shared memory mode + const args = [bbBinaryPath, 'msgpack', 'run', '--input', `${shmName}.shm`, '--max-clients', clientCount.toString()]; + const bbProcess = spawn(findPackageRoot() + '/scripts/kill_wrapper.sh', args, { + stdio: ['ignore', 'ignore', 'ignore'], + env, + }); + // Disconnect from event loop so process can exit. The kill wrapper will reap bb once parent (node) dies. 
+ bbProcess.unref(); + + // Capture stderr for error diagnostics + // bbProcess.stderr?.on('data', (data: Buffer) => { + // stderrOutput += data.toString(); + // }); + + // Track if process has exited + let processExited = false; + let exitError: Error | null = null; + + bbProcess.on('error', err => { + processExited = true; + exitError = new Error(`Native backend process error: ${err.message}`); + }); + + bbProcess.on('exit', (code, signal) => { + processExited = true; + if (code !== null && code !== 0) { + exitError = new Error(`Native backend process exited with code ${code}`); + } else if (signal && signal !== 'SIGTERM') { + exitError = new Error(`Native backend process killed with signal ${signal}`); + } + }); + + // Wait for bb to create shared memory + // Retry connection every 100ms for up to 3 seconds + const retryInterval = 100; // ms + const timeout = 3000; // ms + const maxAttempts = Math.floor(timeout / retryInterval); + let client: any = null; + + try { + for (let attempt = 0; attempt < maxAttempts; attempt++) { + // Check if bb process has exited before attempting connection + if (processExited) { + throw exitError || new Error('Native backend process exited unexpectedly during startup'); + } + + // Wait before attempting connection (except first attempt) + if (attempt > 0) { + await new Promise(resolve => setTimeout(resolve, retryInterval)); + } + + try { + // Create NAPI client with matching max_clients value + client = new addon.MsgpackClient(shmName, clientCount); + break; // Success! + } catch (err: any) { + // Connection failed, will retry + if (attempt === maxAttempts - 1) { + // Last attempt failed - check one more time if process exited + if (processExited && exitError) { + throw exitError; + } + throw new Error(`Failed to connect to shared memory after ${timeout}ms: ${err.message}`); + } + } + } + + if (!client) { + throw new Error('Failed to create client connection'); + } + + return new BarretenbergNativeShmSyncBackend(bbProcess, client); + } finally { + // If we failed to connect, ensure the process is killed + // kill() returns false if process already exited, but doesn't throw + if (!client) { + bbProcess.kill('SIGKILL'); + } + } + } + + call(inputBuffer: Uint8Array): Uint8Array { + try { + const responseBuffer = this.client.call(Buffer.from(inputBuffer)); + return new Uint8Array(responseBuffer); + } catch (err: any) { + throw new Error(`Shared memory call failed: ${err.message}`); + } + } + + private cleanup(): void { + if (this.client) { + try { + this.client.close(); + } catch (e) { + // Ignore errors during cleanup + } + } + } + + destroy(): void { + this.cleanup(); + this.process.kill('SIGTERM'); + // Remove process event listeners to prevent hanging + this.process.removeAllListeners(); + } +} diff --git a/barretenberg/ts/src/bb_backends/node/native_socket.ts b/barretenberg/ts/src/bb_backends/node/native_socket.ts new file mode 100644 index 000000000000..a1610ee4bdcd --- /dev/null +++ b/barretenberg/ts/src/bb_backends/node/native_socket.ts @@ -0,0 +1,312 @@ +import { spawn, ChildProcess } from 'child_process'; +import * as net from 'net'; +import * as fs from 'fs'; +import * as os from 'os'; +import * as path from 'path'; +import { IMsgpackBackendAsync } from '../interface.js'; +import { findPackageRoot } from './platform.js'; + +/** + * Asynchronous native backend that communicates with bb binary via Unix Domain Socket. + * Uses event-based I/O with a state machine to handle partial reads. 
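Editorial aside, not part of the patch: the pipe backend above and the socket backend below share the same wire format, a 4-byte little-endian length followed by the msgpack payload. A minimal, self-contained sketch of that framing follows; the helper names are hypothetical.

```typescript
// Hypothetical helpers modelling the length-prefixed msgpack framing used by
// the pipe and socket backends: 4-byte little-endian length, then the payload.
function encodeFrame(payload: Uint8Array): Buffer {
  const frame = Buffer.alloc(4 + payload.length);
  frame.writeUInt32LE(payload.length, 0);
  frame.set(payload, 4);
  return frame;
}

// Decodes one complete frame, returning the payload and bytes consumed,
// or null if the buffer does not yet hold a full frame (partial read).
function decodeFrame(data: Buffer): { payload: Uint8Array; consumed: number } | null {
  if (data.length < 4) return null;
  const length = data.readUInt32LE(0);
  if (data.length < 4 + length) return null;
  return { payload: new Uint8Array(data.subarray(4, 4 + length)), consumed: 4 + length };
}
```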
+ * + * Architecture: bb acts as the SERVER, TypeScript is the CLIENT + * - bb creates the socket and listens for connections + * - TypeScript waits for socket file to exist, then connects + * + * Protocol: + * - Request: 4-byte little-endian length + msgpack buffer + * - Response: 4-byte little-endian length + msgpack buffer + */ +export class BarretenbergNativeSocketAsyncBackend implements IMsgpackBackendAsync { + private process: ChildProcess; + private socket: net.Socket | null = null; + private socketPath: string; + private connectionPromise: Promise; + private connectionTimeout: NodeJS.Timeout | null = null; + + // Queue of pending callbacks for pipelined requests + // Responses come back in FIFO order, so we match them with queued callbacks + private pendingCallbacks: Array<{ + resolve: (data: Uint8Array) => void; + reject: (error: Error) => void; + }> = []; + + // State machine for reading responses + private readingLength: boolean = true; + private lengthBuffer: Buffer = Buffer.alloc(4); + private lengthBytesRead: number = 0; + private responseLength: number = 0; + private responseBuffer: Buffer | null = null; + private responseBytesRead: number = 0; + + constructor(bbBinaryPath: string, threads?: number) { + // Create a unique socket path in temp directory + this.socketPath = path.join(os.tmpdir(), `bb-${process.pid}-${Date.now()}.sock`); + + // Ensure socket path doesn't already exist (cleanup from previous crashes) + if (fs.existsSync(this.socketPath)) { + fs.unlinkSync(this.socketPath); + } + + let connectionResolve: (() => void) | null = null; + let connectionReject: ((error: Error) => void) | null = null; + + this.connectionPromise = new Promise((resolve, reject) => { + connectionResolve = resolve; + connectionReject = reject; + }); + + // Set HARDWARE_CONCURRENCY if threads specified + const env = threads !== undefined ? { ...process.env, HARDWARE_CONCURRENCY: threads.toString() } : process.env; + + // Spawn bb process - it will create the socket server + const args = [bbBinaryPath, 'msgpack', 'run', '--input', this.socketPath]; + this.process = spawn(findPackageRoot() + '/scripts/kill_wrapper.sh', args, { + stdio: ['ignore', 'ignore', 'ignore'], + env, + }); + // Disconnect from event loop so process can exit. The kill wrapper will reap bb once parent (node) dies. + this.process.unref(); + + this.process.on('error', err => { + if (connectionReject) { + connectionReject(new Error(`Native backend process error: ${err.message}`)); + connectionReject = null; + connectionResolve = null; + } + // Reject all pending callbacks + const error = new Error(`Native backend process error: ${err.message}`); + for (const callback of this.pendingCallbacks) { + callback.reject(error); + } + this.pendingCallbacks = []; + }); + + this.process.on('exit', (code, signal) => { + const errorMsg = + code !== null && code !== 0 + ? `Native backend process exited with code ${code}` + : signal && signal !== 'SIGTERM' + ? 
`Native backend process killed with signal ${signal}` + : 'Native backend process exited unexpectedly'; + + if (connectionReject) { + connectionReject(new Error(errorMsg)); + connectionReject = null; + connectionResolve = null; + } + // Reject all pending callbacks + const error = new Error(errorMsg); + for (const callback of this.pendingCallbacks) { + callback.reject(error); + } + this.pendingCallbacks = []; + }); + + // Wait for bb to create socket file, then connect + this.waitForSocketAndConnect() + .then(() => { + if (connectionResolve) { + connectionResolve(); + connectionResolve = null; + connectionReject = null; + } + }) + .catch(err => { + if (connectionReject) { + connectionReject(err); + connectionReject = null; + connectionResolve = null; + } + }); + + // Set a timeout for connection + this.connectionTimeout = setTimeout(() => { + if (connectionReject) { + connectionReject(new Error('Timeout waiting for bb socket connection')); + connectionReject = null; + connectionResolve = null; + this.cleanup(); + } + }, 5000); + } + + private async waitForSocketAndConnect(): Promise { + // Poll for socket file to exist (bb is creating it) + const startTime = Date.now(); + while (!fs.existsSync(this.socketPath)) { + if (Date.now() - startTime > 5000) { + throw new Error('Timeout waiting for bb to create socket file'); + } + await new Promise(resolve => setTimeout(resolve, 50)); + } + + // Additional check: ensure it's actually a socket + const stats = fs.statSync(this.socketPath); + if (!stats.isSocket()) { + throw new Error(`Path exists but is not a socket: ${this.socketPath}`); + } + + // Connect to bb's socket server as a client + return new Promise((resolve, reject) => { + this.socket = net.connect(this.socketPath); + + // Disable Nagle's algorithm for lower latency + this.socket.setNoDelay(true); + + // Set up event handlers + this.socket.once('connect', () => { + this.socket!.unref(); + + // Clear connection timeout on successful connection + if (this.connectionTimeout) { + clearTimeout(this.connectionTimeout); + this.connectionTimeout = null; + } + resolve(); + }); + + this.socket.once('error', err => { + reject(new Error(`Failed to connect to bb socket: ${err.message}`)); + }); + + // Set up data handler after connection is established + this.socket.on('data', (chunk: Buffer) => { + this.handleData(chunk); + }); + + // Handle ongoing errors after initial connection + this.socket.on('error', err => { + // Reject all pending callbacks + const error = new Error(`Socket error: ${err.message}`); + for (const callback of this.pendingCallbacks) { + callback.reject(error); + } + this.pendingCallbacks = []; + }); + + this.socket.on('end', () => { + // Reject all pending callbacks + const error = new Error('Socket connection ended unexpectedly'); + for (const callback of this.pendingCallbacks) { + callback.reject(error); + } + this.pendingCallbacks = []; + }); + }); + } + + private handleData(chunk: Buffer): void { + let offset = 0; + + while (offset < chunk.length) { + if (this.readingLength) { + // Reading 4-byte length prefix + const bytesToCopy = Math.min(4 - this.lengthBytesRead, chunk.length - offset); + chunk.copy(this.lengthBuffer, this.lengthBytesRead, offset, offset + bytesToCopy); + this.lengthBytesRead += bytesToCopy; + offset += bytesToCopy; + + if (this.lengthBytesRead === 4) { + // Length is complete, switch to reading data + this.responseLength = this.lengthBuffer.readUInt32LE(0); + this.responseBuffer = Buffer.alloc(this.responseLength); + this.responseBytesRead = 0; + 
this.readingLength = false; + } + } else { + // Reading response data + const bytesToCopy = Math.min(this.responseLength - this.responseBytesRead, chunk.length - offset); + chunk.copy(this.responseBuffer!, this.responseBytesRead, offset, offset + bytesToCopy); + this.responseBytesRead += bytesToCopy; + offset += bytesToCopy; + + if (this.responseBytesRead === this.responseLength) { + // Response is complete - dequeue the next pending callback (FIFO) + const callback = this.pendingCallbacks.shift(); + if (callback) { + callback.resolve(new Uint8Array(this.responseBuffer!)); + } else { + // This shouldn't happen - response without a pending request + console.warn('Received response but no pending callback'); + } + + // Reset state for next message + this.readingLength = true; + this.lengthBytesRead = 0; + this.responseLength = 0; + this.responseBuffer = null; + this.responseBytesRead = 0; + } + } + } + } + + async call(inputBuffer: Uint8Array): Promise { + // Wait for connection to be established + await this.connectionPromise; + + if (!this.socket) { + throw new Error('Socket not connected'); + } + + return new Promise((resolve, reject) => { + // Enqueue this promise's callbacks (FIFO order) + this.pendingCallbacks.push({ resolve, reject }); + + // Write request: 4-byte little-endian length + msgpack data + // Socket will buffer these if needed, maintaining order + const lengthBuf = Buffer.alloc(4); + lengthBuf.writeUInt32LE(inputBuffer.length, 0); + this.socket!.write(lengthBuf); + this.socket!.write(inputBuffer); + }); + } + + private cleanup(): void { + // Reject any remaining pending callbacks + const error = new Error('Backend connection closed'); + for (const callback of this.pendingCallbacks) { + callback.reject(error); + } + this.pendingCallbacks = []; + + try { + // Remove all event listeners to prevent hanging + if (this.socket) { + this.socket.removeAllListeners(); + // Unref so socket doesn't keep event loop alive + // this.socket.unref(); + this.socket.destroy(); + } + } catch (e) { + // Ignore errors during cleanup + } + + // Clear connection timeout if still pending + if (this.connectionTimeout) { + clearTimeout(this.connectionTimeout); + this.connectionTimeout = null; + } + + // Remove process event listeners and unref to not block event loop + this.process.removeAllListeners(); + // this.process.unref(); + + // Don't try to unlink socket - bb owns it and will clean it up + } + + async destroy(): Promise { + // Cleanup first (closes socket, unrefs everything) + this.cleanup(); + + // Send SIGTERM for graceful shutdown + // Process is unref'd so won't block event loop - just kill and return + try { + this.process.kill('SIGTERM'); + } catch (e) { + // Already dead + } + } +} diff --git a/barretenberg/ts/src/bb_backends/node/platform.ts b/barretenberg/ts/src/bb_backends/node/platform.ts new file mode 100644 index 000000000000..bb16f16af274 --- /dev/null +++ b/barretenberg/ts/src/bb_backends/node/platform.ts @@ -0,0 +1,151 @@ +import * as path from 'path'; +import * as fs from 'fs'; +import { fileURLToPath } from 'url'; + +function getCurrentDir() { + if (typeof __dirname !== 'undefined') { + return __dirname; + } else { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return path.dirname(fileURLToPath(import.meta.url)); + } +} + +/** + * Find package root by climbing directory tree until package.json is found. 
+ * @param startDir Starting directory to search from + * @returns Absolute path to package root, or null if not found + */ +export function findPackageRoot(): string | null { + let currentDir = getCurrentDir(); + const root = path.parse(currentDir).root; + + while (currentDir !== root) { + const packageJsonPath = path.join(currentDir, 'package.json'); + if (fs.existsSync(packageJsonPath)) { + return currentDir; + } + currentDir = path.dirname(currentDir); + } + + return null; +} + +/** + * Supported platform/architecture combinations. + */ +export type Platform = 'x86_64-linux' | 'x86_64-darwin' | 'aarch64-linux' | 'aarch64-darwin'; + +/** + * Map from Platform to build directory name. + */ +const PLATFORM_TO_BUILD_DIR: Record = { + 'x86_64-linux': 'amd64-linux', + 'x86_64-darwin': 'amd64-macos', + 'aarch64-linux': 'arm64-linux', + 'aarch64-darwin': 'arm64-macos', +}; + +/** + * Detect the current platform and architecture. + * @returns Platform identifier or null if unsupported + */ +export function detectPlatform(): Platform | null { + const arch = process.arch; // 'x64' | 'arm64' | ... + const platform = process.platform; // 'linux' | 'darwin' | 'win32' | ... + + if (arch === 'x64' && platform === 'linux') { + return 'x86_64-linux'; + } + if (arch === 'x64' && platform === 'darwin') { + return 'x86_64-darwin'; + } + if (arch === 'arm64' && platform === 'linux') { + return 'aarch64-linux'; + } + if (arch === 'arm64' && platform === 'darwin') { + return 'aarch64-darwin'; + } + + return null; +} + +/** + * Find the bb binary for the native backend. + * @param customPath Optional custom path to bb binary (overrides automatic detection) + * @returns Absolute path to bb binary, or null if not found + * + * Search order: + * 1. If customPath is provided and exists, return it + * 2. 
Otherwise search in /build//bb + */ +export function findBbBinary(customPath?: string): string | null { + // Check custom path first if provided + if (customPath) { + if (fs.existsSync(customPath)) { + return path.resolve(customPath); + } + // Custom path provided but doesn't exist - return null + return null; + } + + // Automatic detection + const platform = detectPlatform(); + if (!platform) { + return null; + } + + const buildDir = PLATFORM_TO_BUILD_DIR[platform]; + + // Get package root by climbing directory tree to find package.json + const packageRoot = findPackageRoot(); + + if (!packageRoot) { + return null; + } + + // Check in build//bb + const bbPath = path.join(packageRoot, 'build', buildDir, 'bb'); + + if (fs.existsSync(bbPath)) { + return bbPath; + } + + return null; +} + +export function findNapiBinary(customPath?: string): string | null { + // Check custom path first if provided + if (customPath) { + if (fs.existsSync(customPath)) { + return path.resolve(customPath); + } + // Custom path provided but doesn't exist - return null + return null; + } + + // Automatic detection + const platform = detectPlatform(); + if (!platform) { + return null; + } + + const buildDir = PLATFORM_TO_BUILD_DIR[platform]; + + // Get package root by climbing directory tree to find package.json + const packageRoot = findPackageRoot(); + + if (!packageRoot) { + return null; + } + + // Check in build//nodejs_module.node + const bbPath = path.join(packageRoot, 'build', buildDir, 'nodejs_module.node'); + + if (fs.existsSync(bbPath)) { + return bbPath; + } + + return null; +} diff --git a/barretenberg/ts/src/bb_backends/sync_to_async_adapter.ts b/barretenberg/ts/src/bb_backends/sync_to_async_adapter.ts new file mode 100644 index 000000000000..045c0e5f69b6 --- /dev/null +++ b/barretenberg/ts/src/bb_backends/sync_to_async_adapter.ts @@ -0,0 +1,21 @@ +import { IMsgpackBackendSync, IMsgpackBackendAsync } from './interface.js'; + +/** + * Adapter that wraps a synchronous backend to provide an async interface. + * The sync backend's blocking calls are simply wrapped in Promise.resolve(). + * + * This is useful for backends like shared memory where the call is actually + * synchronous but we want to use it with the async API. + */ +export class SyncToAsyncAdapter implements IMsgpackBackendAsync { + constructor(private syncBackend: IMsgpackBackendSync) {} + + async call(inputBuffer: Uint8Array): Promise { + // The sync backend blocks until complete, so just wrap in a resolved promise + return Promise.resolve(this.syncBackend.call(inputBuffer)); + } + + async destroy(): Promise { + this.syncBackend.destroy(); + } +} diff --git a/barretenberg/ts/src/bb_backends/wasm.ts b/barretenberg/ts/src/bb_backends/wasm.ts new file mode 100644 index 000000000000..c77c211fbd83 --- /dev/null +++ b/barretenberg/ts/src/bb_backends/wasm.ts @@ -0,0 +1,110 @@ +import { BarretenbergWasmMain, BarretenbergWasmMainWorker } from '../barretenberg_wasm/barretenberg_wasm_main/index.js'; +import { fetchModuleAndThreads } from '../barretenberg_wasm/index.js'; +import { IMsgpackBackendSync, IMsgpackBackendAsync } from './interface.js'; +import { createMainWorker } from '../barretenberg_wasm/barretenberg_wasm_main/factory/node/index.js'; +import { getRemoteBarretenbergWasm } from '../barretenberg_wasm/helpers/index.js'; +import { createDebugLogger } from '../log/index.js'; +import { proxy } from 'comlink'; + +/** + * Synchronous WASM backend that wraps BarretenbergWasmMain. + * Encapsulates all WASM initialization and memory management. 
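+ * Note: call() blocks the calling thread until the WASM call completes;
+ * wrap this backend in SyncToAsyncAdapter when an async interface is needed.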
+ */ +export class BarretenbergWasmSyncBackend implements IMsgpackBackendSync { + private constructor(private wasm: BarretenbergWasmMain) {} + + /** + * Create and initialize a synchronous WASM backend. + * @param wasmPath Optional path to WASM files + * @param logger Optional logging function + */ + static async new(wasmPath?: string, logger?: (msg: string) => void): Promise { + const wasm = new BarretenbergWasmMain(); + const { module, threads } = await fetchModuleAndThreads(1, wasmPath, logger); + await wasm.init(module, threads, logger); + return new BarretenbergWasmSyncBackend(wasm); + } + + call(inputBuffer: Uint8Array): Uint8Array { + return this.wasm.cbindCall('bbapi', inputBuffer); + } + + destroy(): void { + // BarretenbergWasmMain has async destroy, but for sync API we call it without awaiting + // This is consistent with the synchronous semantics expected by the caller + void this.wasm.destroy(); + } +} + +/** + * Asynchronous WASM backend that supports both direct WASM and worker-based modes. + * + * Worker mode (default): Runs WASM on a worker thread to avoid blocking the main thread. + * - Browser-safe: Won't block UI during long operations + * - Overhead: ~3-4x slower due to serialize/deserialize for each call + * - Use for: Browser environments, long-running operations + * + * Direct mode (useWorker: false): Runs WASM directly on the calling thread. + * - Performance: ~3-4x faster (no serialize/deserialize overhead) + * - Warning: Will block the thread during operations + * - Use for: Node.js, benchmarks, tight loops where performance is critical + */ +export class BarretenbergWasmAsyncBackend implements IMsgpackBackendAsync { + private constructor( + private wasm: BarretenbergWasmMain | BarretenbergWasmMainWorker, + private worker?: any, + ) {} + + /** + * Create and initialize an asynchronous WASM backend. + * @param options.threads Number of threads (defaults to hardware max, up to 32 for parallel proving) + * @param options.wasmPath Optional path to WASM files + * @param options.logger Optional logging function + * @param options.memory Optional initial and maximum memory configuration + * @param options.useWorker Run on worker thread (default: true for browser safety) + */ + static async new( + options: { + threads?: number; + wasmPath?: string; + logger?: (msg: string) => void; + memory?: { initial?: number; maximum?: number }; + useWorker?: boolean; + } = {}, + ): Promise { + // Default to worker mode for browser safety + const useWorker = options.useWorker ?? true; + + if (useWorker) { + // Worker-based mode: runs on worker thread (browser-safe) + const worker = await createMainWorker(); + const wasm = getRemoteBarretenbergWasm(worker); + const { module, threads } = await fetchModuleAndThreads(options.threads, options.wasmPath, options.logger); + await wasm.init( + module, + threads, + proxy(options.logger ?? 
createDebugLogger('bb_wasm_async')), + options.memory?.initial, + options.memory?.maximum, + ); + return new BarretenbergWasmAsyncBackend(wasm, worker); + } else { + // Direct mode: runs on calling thread (faster but blocks thread) + const wasm = new BarretenbergWasmMain(); + const { module, threads } = await fetchModuleAndThreads(options.threads, options.wasmPath, options.logger); + await wasm.init(module, threads, options.logger, options.memory?.initial, options.memory?.maximum); + return new BarretenbergWasmAsyncBackend(wasm); + } + } + + async call(inputBuffer: Uint8Array): Promise { + return this.wasm.cbindCall('bbapi', inputBuffer); + } + + async destroy(): Promise { + await this.wasm.destroy(); + if (this.worker) { + await this.worker.terminate(); + } + } +} diff --git a/barretenberg/ts/src/bindgen/function_declaration.ts b/barretenberg/ts/src/bindgen/function_declaration.ts deleted file mode 100644 index 643df916c544..000000000000 --- a/barretenberg/ts/src/bindgen/function_declaration.ts +++ /dev/null @@ -1,11 +0,0 @@ -export interface Arg { - name: string; - type: string; -} - -export interface FunctionDeclaration { - functionName: string; - inArgs: Arg[]; - outArgs: Arg[]; - isAsync: boolean; -} diff --git a/barretenberg/ts/src/bindgen/index.ts b/barretenberg/ts/src/bindgen/index.ts deleted file mode 100644 index 89190f775c23..000000000000 --- a/barretenberg/ts/src/bindgen/index.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { generateRustCode } from './rust.js'; -import { generateTypeScriptCode } from './typescript.js'; - -const [, , exp = '../exports.json', lang = 'ts'] = process.argv; - -function generateCode(exports: string, lang: string) { - switch (lang) { - case 'ts': - return generateTypeScriptCode(exports); - case 'rust': - return generateRustCode(exports); - default: - throw new Error(`Unknown lang: ${lang}`); - } -} - -console.log(generateCode(exp, lang)); diff --git a/barretenberg/ts/src/bindgen/mappings.ts b/barretenberg/ts/src/bindgen/mappings.ts deleted file mode 100644 index 2b5356a84831..000000000000 --- a/barretenberg/ts/src/bindgen/mappings.ts +++ /dev/null @@ -1,67 +0,0 @@ -/* eslint-disable camelcase */ -const typeMap: { [key: string]: string } = { - in_ptr: 'Ptr', - out_ptr: 'Ptr', - 'bb::fr::in_buf': 'Fr', - 'bb::fr::vec_in_buf': 'Fr[]', - 'fr::in_buf': 'Fr', - 'fr::out_buf': 'Fr', - 'fr::vec_in_buf': 'Fr[]', - 'fr::vec_out_buf': 'Fr[]', - 'fq::in_buf': 'Fq', - 'fq::out_buf': 'Fq', - 'fq::vec_in_buf': 'Fq[]', - 'fq::vec_out_buf': 'Fq[]', - 'const uint8_t *': 'Uint8Array', - uint8_vec_vec_in_buf: 'Uint8Array[]', - 'uint8_t **': 'Uint8Array', - in_str_buf: 'string', - out_str_buf: 'string', - in_buf32: 'Buffer32', - out_buf32: 'Buffer32', - 'uint32_t *': 'number', - 'const uint32_t *': 'number', - 'affine_element::in_buf': 'Point', - 'affine_element::out_buf': 'Point', - 'const bool *': 'boolean', - 'bool *': 'boolean', - 'multisig::MultiSigPublicKey::vec_in_buf': 'Buffer128[]', - 'multisig::MultiSigPublicKey::out_buf': 'Buffer128', - 'multisig::RoundOnePublicOutput::vec_in_buf': 'Buffer128[]', - 'multisig::RoundOnePublicOutput::out_buf': 'Buffer128', - 'multisig::RoundOnePrivateOutput::in_buf': 'Buffer128', - 'multisig::RoundOnePrivateOutput::out_buf': 'Buffer128', -}; - -const deserializerMap: { [key: string]: string } = { - out_ptr: 'Ptr', - 'fr::out_buf': 'Fr', - 'fr::vec_out_buf': 'VectorDeserializer(Fr)', - 'fq::out_buf': 'Fq', - 'fq::vec_out_buf': 'VectorDeserializer(Fq)', - 'uint8_t **': 'BufferDeserializer()', - out_str_buf: 'StringDeserializer()', - 
out_buf32: 'Buffer32', - 'uint32_t *': 'NumberDeserializer()', - 'affine_element::out_buf': 'Point', - 'bool *': 'BoolDeserializer()', - 'multisig::MultiSigPublicKey::out_buf': 'Buffer128', - 'multisig::RoundOnePublicOutput::out_buf': 'Buffer128', - 'multisig::RoundOnePrivateOutput::out_buf': 'Buffer128', -}; - -export function mapType(type: string) { - if (typeMap[type]) { - return typeMap[type]; - } - throw new Error(`Unknown type: ${type}`); -} - -export const mapRustType = mapType; - -export function mapDeserializer(type: string) { - if (deserializerMap[type]) { - return deserializerMap[type]; - } - throw new Error(`Unknown deserializer for type: ${type}`); -} diff --git a/barretenberg/ts/src/bindgen/rust.ts b/barretenberg/ts/src/bindgen/rust.ts deleted file mode 100644 index 9393d813f528..000000000000 --- a/barretenberg/ts/src/bindgen/rust.ts +++ /dev/null @@ -1,52 +0,0 @@ -import fs from 'fs'; -import { Arg, FunctionDeclaration } from './function_declaration.js'; -import { mapDeserializer, mapRustType } from './mappings.js'; - -export function generateRustCode(filename: string) { - const fileContent = fs.readFileSync(filename, 'utf-8'); - const functionDeclarations: FunctionDeclaration[] = JSON.parse(fileContent); - - let output = ` -// WARNING: FILE CODE GENERATED BY BINDGEN UTILITY. DO NOT EDIT! -use crate::call_wasm_export::call_wasm_export; -use crate::serialize::{BufferDeserializer, NumberDeserializer, VectorDeserializer, BoolDeserializer}; -use crate::types::{Fr, Fq, Point, Buffer32, Buffer128}; -`; - - for (const { functionName, inArgs, outArgs } of functionDeclarations) { - const parameters = inArgs.map(({ name, type }) => `${name}: ${mapRustType(type)}`).join(', '); - const inArgsVar = `let in_args = vec![${inArgs.map(arg => arg.name).join(', ')}];`; - const outTypesVar = `let out_types = vec![${outArgs.map(arg => mapDeserializer(arg.type)).join(', ')}];`; - const wasmCall = `let result = call_wasm_export(&"${functionName}", &in_args, &out_types)?;`; - - const returnStmt = getReturnStmt(outArgs); - const returnType = - outArgs.length === 0 - ? 
'-> Result<(), Box>' - : `-> Result<(${outArgs.map(a => mapRustType(a.type)).join(', ')}), Box>`; - - const functionDecl = ` -pub fn ${functionName}(${parameters})${returnType} { - ${inArgsVar} - ${outTypesVar} - ${wasmCall} - ${returnStmt} -} -`; - - output += functionDecl; - } - - return output; -} - -function getReturnStmt(outArgs: Arg[]) { - switch (outArgs.length) { - case 0: - return 'Ok(())'; - case 1: - return `Ok(result[0].clone())`; - default: - return `Ok((${outArgs.map((_, idx) => `result[${idx}].clone()`).join(', ')}))`; - } -} diff --git a/barretenberg/ts/src/bindgen/to_camel_case.ts b/barretenberg/ts/src/bindgen/to_camel_case.ts deleted file mode 100644 index 9e7a56175714..000000000000 --- a/barretenberg/ts/src/bindgen/to_camel_case.ts +++ /dev/null @@ -1,10 +0,0 @@ -export function toCamelCase(input: string): string { - const words = input.split('_'); - const camelCasedWords = words.map((word, index) => { - if (index === 0) { - return word; - } - return word.charAt(0).toUpperCase() + word.slice(1); - }); - return camelCasedWords.join(''); -} diff --git a/barretenberg/ts/src/bindgen/typescript.ts b/barretenberg/ts/src/bindgen/typescript.ts deleted file mode 100644 index cd7b837ac23e..000000000000 --- a/barretenberg/ts/src/bindgen/typescript.ts +++ /dev/null @@ -1,116 +0,0 @@ -import fs from 'fs'; -import { mapDeserializer, mapType } from './mappings.js'; -import { toCamelCase } from './to_camel_case.js'; -import { FunctionDeclaration } from './function_declaration.js'; - -export function generateTypeScriptCode(filename: string) { - const fileContent = fs.readFileSync(filename, 'utf-8'); - const functionDeclarations: FunctionDeclaration[] = JSON.parse(fileContent); - - let output = `// WARNING: FILE CODE GENERATED BY BINDGEN UTILITY. DO NOT EDIT! -/* eslint-disable @typescript-eslint/no-unused-vars */ -import { BarretenbergWasmMain, BarretenbergWasmMainWorker } from '../barretenberg_wasm/barretenberg_wasm_main/index.js'; -import { BufferDeserializer, NumberDeserializer, VectorDeserializer, BoolDeserializer, StringDeserializer, serializeBufferable, OutputType } from '../serialize/index.js'; -import { Fr, Point, Buffer32, Ptr } from '../types/index.js'; - -`; - - output += generateClass(functionDeclarations); - output += generateSyncClass(functionDeclarations); - - return output; -} - -function generateClass(functionDeclarations: FunctionDeclaration[]) { - let output = ` -export class BarretenbergApi { - constructor(protected wasm: BarretenbergWasmMainWorker) {} - -`; - - for (const { functionName, inArgs, outArgs } of functionDeclarations) { - try { - const parameters = inArgs.map(({ name, type }) => `${toCamelCase(name)}: ${mapType(type)}`).join(', '); - const inArgsVar = `const inArgs = [${inArgs - .map(arg => toCamelCase(arg.name)) - .join(', ')}].map(serializeBufferable);`; - const outTypesVar = `const outTypes: OutputType[] = [${outArgs - .map(arg => mapDeserializer(arg.type)) - .join(', ')}];`; - const wasmCall = `const result = await this.wasm.callWasmExport('${functionName}', inArgs, outTypes.map(t=>t.SIZE_IN_BYTES));`; - const outVar = `const out = result.map((r, i) => outTypes[i].fromBuffer(r));`; - - const n = outArgs.length; - const returnStmt = n === 0 ? 'return;' : n === 1 ? 'return out[0];' : 'return out as any;'; - const returnType = - outArgs.length === 0 - ? 'void' - : outArgs.length === 1 - ? 
`${mapType(outArgs[0].type)}` - : `[${outArgs.map(a => mapType(a.type)).join(', ')}]`; - - output += ` - async ${toCamelCase(functionName)}(${parameters}): Promise<${returnType}> { - ${inArgsVar} - ${outTypesVar} - ${wasmCall} - ${outVar} - ${returnStmt} - } -`; - } catch (err: any) { - throw new Error(`Function ${functionName}: ${err.message}`); - } - } - - output += `}`; - - return output; -} - -function generateSyncClass(functionDeclarations: FunctionDeclaration[]) { - let output = ` -export class BarretenbergApiSync { - constructor(protected wasm: BarretenbergWasmMain) {} - -`; - - for (const { functionName, inArgs, outArgs } of functionDeclarations) { - try { - const parameters = inArgs.map(({ name, type }) => `${toCamelCase(name)}: ${mapType(type)}`).join(', '); - const inArgsVar = `const inArgs = [${inArgs - .map(arg => toCamelCase(arg.name)) - .join(', ')}].map(serializeBufferable);`; - const outTypesVar = `const outTypes: OutputType[] = [${outArgs - .map(arg => mapDeserializer(arg.type)) - .join(', ')}];`; - const wasmCall = `const result = this.wasm.callWasmExport('${functionName}', inArgs, outTypes.map(t=>t.SIZE_IN_BYTES));`; - const outVar = `const out = result.map((r, i) => outTypes[i].fromBuffer(r));`; - - const n = outArgs.length; - const returnStmt = n === 0 ? 'return;' : n === 1 ? 'return out[0];' : 'return out as any;'; - const returnType = - outArgs.length === 0 - ? 'void' - : outArgs.length === 1 - ? `${mapType(outArgs[0].type)}` - : `[${outArgs.map(a => mapType(a.type)).join(', ')}]`; - - output += ` - ${toCamelCase(functionName)}(${parameters}): ${returnType} { - ${inArgsVar} - ${outTypesVar} - ${wasmCall} - ${outVar} - ${returnStmt} - } -`; - } catch (err: any) { - throw new Error(`Function ${functionName}: ${err.message}`); - } - } - - output += `}`; - - return output; -} diff --git a/barretenberg/ts/src/cbind/README.md b/barretenberg/ts/src/cbind/README.md index 14e7dd31c4e3..9cd309aa9801 100644 --- a/barretenberg/ts/src/cbind/README.md +++ b/barretenberg/ts/src/cbind/README.md @@ -1 +1 @@ -Derive bindings from the reported scheme of msgpack from bb. Currently redundant with bindgen, until that is supplanted. +Derives bindings from the reported msgpack schema from bb. 
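For orientation before the generator changes below: a minimal sketch of how the regenerated `AsyncApi` is expected to be constructed over the new backend abstraction. The import paths and the factory used here are assumptions for illustration; only the `AsyncApi(backend)` constructor shape and the `destroy()` forwarding come from this patch.

```typescript
// Sketch only: wiring a generated AsyncApi to an IMsgpackBackendAsync implementation.
// Paths are assumed; adjust to wherever generated/async.ts and bb_backends actually live.
import { AsyncApi } from './generated/async.js';
import { BarretenbergWasmAsyncBackend } from '../bb_backends/wasm.js';

async function withAsyncApi(): Promise<void> {
  // Any IMsgpackBackendAsync works here: the WASM worker backend, the native
  // socket backend, or a sync backend wrapped in SyncToAsyncAdapter.
  const backend = await BarretenbergWasmAsyncBackend.new({ threads: 1 });
  const api = new AsyncApi(backend); // generated class now takes a backend, not a wasm instance
  try {
    // ...issue msgpack commands through the generated methods on `api`...
  } finally {
    await api.destroy(); // forwards to backend.destroy()
  }
}
```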
diff --git a/barretenberg/ts/src/cbind/generate.ts b/barretenberg/ts/src/cbind/generate.ts index 694c8a47d140..4dc7f9bdd8d9 100644 --- a/barretenberg/ts/src/cbind/generate.ts +++ b/barretenberg/ts/src/cbind/generate.ts @@ -11,7 +11,6 @@ import { createSharedTypesCompiler, createSyncApiCompiler, createAsyncApiCompiler, - createNativeApiCompiler, type SchemaCompiler, } from './schema_compiler.js'; @@ -39,11 +38,6 @@ const GENERATORS: GeneratorConfig[] = [ outputFile: 'generated/async.ts', createCompiler: createAsyncApiCompiler, }, - { - name: 'Native API', - outputFile: 'generated/native.ts', - createCompiler: createNativeApiCompiler, - }, ]; // @ts-ignore diff --git a/barretenberg/ts/src/cbind/schema_compiler.ts b/barretenberg/ts/src/cbind/schema_compiler.ts index d51781f59363..35d5bdfaeea6 100644 --- a/barretenberg/ts/src/cbind/schema_compiler.ts +++ b/barretenberg/ts/src/cbind/schema_compiler.ts @@ -34,7 +34,7 @@ export interface FunctionMetadata { // Compiler configuration export interface CompilerConfig { - mode: 'types' | 'sync' | 'async' | 'native'; + mode: 'types' | 'sync' | 'async'; imports?: string[]; wasmImport?: string; } @@ -291,10 +291,15 @@ export class SchemaCompiler { switch (type) { case 'array': { const [subtype, size] = args[0]; + // Special case: byte arrays should be Uint8Array + if (subtype === 'unsigned char') { + return { typeName: 'Uint8Array' }; + } + // For other types, use T[] - idiomatic TypeScript for fixed-length homogeneous arrays const subtypeInfo = this.processSchema(subtype); return { - typeName: `Tuple<${subtypeInfo.typeName}, ${size}>`, - msgpackTypeName: `Tuple<${subtypeInfo.msgpackTypeName || subtypeInfo.typeName}, ${size}>`, + typeName: `${subtypeInfo.typeName}[]`, + msgpackTypeName: `${subtypeInfo.msgpackTypeName || subtypeInfo.typeName}[]`, }; } @@ -387,6 +392,7 @@ export class SchemaCompiler { case 'unsigned int': case 'unsigned short': case 'unsigned long': + case 'unsigned char': case 'double': return { typeName: 'number' }; case 'string': @@ -597,29 +603,43 @@ ${methods} const className = this.getApiClassName(); const methods = this.functionMetadata.map(m => this.generateApiMethod(m)).join('\n\n'); - if (this.config.mode === 'native') { - return this.generateNativeApiClass(methods); - } - // For sync API, don't implement BbApiBase since methods are synchronous const implementsClause = this.config.mode === 'sync' ? '' : ' implements BbApiBase'; + // For tracing all calls to bb. + // const msgpackCallHelper = + // `${this.config.mode === 'async' ? 'async ' : ''}function msgpackCall(backend: ${this.getBackendType()}, input: any[]) {\n` + + // ` const commandName = input[0]?.[0] || 'unknown';\n` + + // ` process.stderr.write(\`[BB MSGPACK ${this.config.mode === 'async' ? 'ASYNC' : 'SYNC'}] \${commandName}\\n\`);\n` + + // ` const inputBuffer = new Encoder({ useRecords: false }).pack(input);\n` + + // ` const encodedResult = ${this.config.mode === 'async' ? 'await ' : ''}backend.call(inputBuffer);\n` + + // ` const result = new Decoder({ useRecords: false }).unpack(encodedResult);\n` + + // ` process.stderr.write(\`[BB MSGPACK ${this.config.mode === 'async' ? 'ASYNC' : 'SYNC'}] \${commandName} => completed\\n\`);\n` + + // ` return result;\n` + + // `}\n`; const msgpackCallHelper = - `${this.config.mode === 'async' ? 'async ' : ''}function msgpackCall(wasm: ${this.getWasmType()}, cbind: string, input: any[]) {` + + `${this.config.mode === 'async' ? 
'async ' : ''}function msgpackCall(backend: ${this.getBackendType()}, input: any[]) {` + ` const inputBuffer = new Encoder({ useRecords: false }).pack(input);` + - ` const encodedResult = ${this.config.mode === 'async' ? 'await ' : ''}wasm.cbindCall(cbind, inputBuffer);` + + ` const encodedResult = ${this.config.mode === 'async' ? 'await ' : ''}backend.call(inputBuffer);` + ` return new Decoder({ useRecords: false }).unpack(encodedResult);` + `}\n`; + const destroyMethod = + this.config.mode === 'sync' + ? ` destroy(): void { + if (this.backend.destroy) this.backend.destroy(); + }` + : ` destroy(): Promise { + return this.backend.destroy ? this.backend.destroy() : Promise.resolve(); + }`; + return ( msgpackCallHelper + `export class ${className}${implementsClause} { - constructor(protected wasm: ${this.getWasmType()}) {} + constructor(protected backend: ${this.getBackendType()}) {} ${methods} - destroy(): Promise { - return this.wasm.destroy(); - } +${destroyMethod} }` ); } @@ -630,19 +650,17 @@ ${methods} return 'SyncApi'; case 'async': return 'AsyncApi'; - case 'native': - return 'NativeApi'; default: throw new Error(`Invalid mode: ${this.config.mode}`); } } - private getWasmType(): string { + private getBackendType(): string { switch (this.config.mode) { case 'sync': - return 'BarretenbergWasmMain'; + return 'IMsgpackBackendSync'; case 'async': - return 'BarretenbergWasmMainWorker'; + return 'IMsgpackBackendAsync'; default: return ''; } @@ -651,23 +669,11 @@ ${methods} private generateApiMethod(metadata: FunctionMetadata): string { const { name, commandType, responseType } = metadata; - if (this.config.mode === 'native') { - return ` ${name}(command: ${commandType}): Promise<${responseType}> { - const msgpackCommand = from${commandType}(command); - return this.sendCommand(['${metadata.commandType}', msgpackCommand]).then(([variantName, result]: [string, any]) => { - if (variantName !== '${responseType}') { - throw new Error(\`Expected variant name '${responseType}' but got '\${variantName}'\`); - } - return to${responseType}(result); - }); - }`; - } - // For async mode, queue immediately and return promise if (this.config.mode === 'async') { return ` ${name}(command: ${commandType}): Promise<${responseType}> { const msgpackCommand = from${commandType}(command); - return msgpackCall(this.wasm, 'bbapi', [["${capitalize(name)}", msgpackCommand]]).then(([variantName, result]: [string, any]) => { + return msgpackCall(this.backend, [["${capitalize(name)}", msgpackCommand]]).then(([variantName, result]: [string, any]) => { if (variantName !== '${responseType}') { throw new Error(\`Expected variant name '${responseType}' but got '\${variantName}'\`); } @@ -679,131 +685,13 @@ ${methods} // For sync mode, keep the synchronous behavior return ` ${name}(command: ${commandType}): ${responseType} { const msgpackCommand = from${commandType}(command); - const [variantName, result] = msgpackCall(this.wasm, 'bbapi', [["${capitalize(name)}", msgpackCommand]]); + const [variantName, result] = msgpackCall(this.backend, [["${capitalize(name)}", msgpackCommand]]); if (variantName !== '${responseType}') { throw new Error(\`Expected variant name '${responseType}' but got '\${variantName}'\`); } return to${responseType}(result); }`; } - - private generateNativeApiClass(methods: string): string { - return `interface NativeApiRequest { - resolve: (value: any) => void; - reject: (error: any) => void; -} - -class StreamBuffer { - private buffer = Buffer.alloc(0); - private expectedLength: number | null = null; - - 
addData(data: Buffer): Buffer[] { - // Create buffer to grow as needed - const newBuffer = Buffer.allocUnsafe(this.buffer.length + data.length); - this.buffer.copy(newBuffer, 0); - data.copy(newBuffer, this.buffer.length); - this.buffer = newBuffer; - - const messages: Buffer[] = []; - - while (true) { - if (this.expectedLength === null) { - if (this.buffer.length < 4) break; - this.expectedLength = this.buffer.readUInt32LE(0); - this.buffer = this.buffer.subarray(4); - } - - if (this.buffer.length < this.expectedLength) break; - - // Extract complete message - const messageBuffer = this.buffer.subarray(0, this.expectedLength); - messages.push(messageBuffer); - this.buffer = this.buffer.subarray(this.expectedLength); - this.expectedLength = null; - } - - return messages; - } -} - -export class NativeApi implements BbApiBase { - private decoder = new Decoder({ useRecords: false }); - private encoder = new Encoder({ useRecords: false }); - private pendingRequests: NativeApiRequest[] = []; - - private constructor(private proc: ChildProcess) {} - - static async new(bbPath = 'bb', logger = console.log): Promise { - const proc = spawn(bbPath, ['msgpack', 'run'], { - stdio: ['pipe', 'pipe', 'pipe'], - }); - - if (!proc.stdout || !proc.stdin) { - throw new Error('Failed to initialize bb process'); - } - - const api = new NativeApi(proc); - const streamBuffer = new StreamBuffer(); - - proc.stdout.on('data', (data: Buffer) => { - const messages = streamBuffer.addData(data); - - for (const messageBuffer of messages) { - const pendingRequest = api.pendingRequests.shift(); - if (!pendingRequest) { - throw new Error('Received response without a pending request'); - } - - try { - const decoded = api.decoder.decode(messageBuffer); - if (!Array.isArray(decoded) || decoded.length !== 2) { - throw new Error(\`Invalid response format: \${JSON.stringify(decoded)}\`); - } - const [variantName, result] = decoded; - pendingRequest.resolve([variantName, result]); - } catch (error) { - pendingRequest.reject(error); - break; - } - } - }); - - proc.stderr.on('data', (data: Buffer) => { - logger(data.toString().trim()); - }); - - proc.on('error', err => { - throw new Error(err.message); - }); - return api; - } - - private sendCommand(command: any): Promise { - return new Promise((resolve, reject) => { - this.pendingRequests.push({ resolve, reject }); - const encoded = this.encoder.encode(command); - - // Write length prefix (4 bytes, little-endian) - const lengthBuffer = Buffer.allocUnsafe(4); - lengthBuffer.writeUInt32LE(encoded.length, 0); - - // Write length prefix followed by the encoded data - this.proc.stdin!.write(lengthBuffer); - this.proc.stdin!.write(encoded); - }); - } - - async close(): Promise { - this.proc.kill(); - } - - destroy(): Promise { - return this.close(); - } - -${methods} -}`; - } } // Factory methods for creating configured compilers @@ -818,7 +706,7 @@ export function createSyncApiCompiler(): SchemaCompiler { return new SchemaCompiler({ mode: 'sync', imports: [ - `import { BarretenbergWasmMain } from "../../barretenberg_wasm/barretenberg_wasm_main/index.js";`, + `import { IMsgpackBackendSync } from '../../bb_backends/interface.js';`, `import { Decoder, Encoder } from 'msgpackr';`, ], }); @@ -828,15 +716,8 @@ export function createAsyncApiCompiler(): SchemaCompiler { return new SchemaCompiler({ mode: 'async', imports: [ - `import { BarretenbergWasmMainWorker } from "../../barretenberg_wasm/barretenberg_wasm_main/index.js";`, + `import { IMsgpackBackendAsync } from 
'../../bb_backends/interface.js';`, `import { Decoder, Encoder } from 'msgpackr';`, ], }); } - -export function createNativeApiCompiler(): SchemaCompiler { - return new SchemaCompiler({ - mode: 'native', - imports: [`import { spawn, ChildProcess } from 'child_process';`, `import { Decoder, Encoder } from 'msgpackr';`], - }); -} diff --git a/barretenberg/ts/src/index.ts b/barretenberg/ts/src/index.ts index c8330ef3f640..1043441f84a3 100644 --- a/barretenberg/ts/src/index.ts +++ b/barretenberg/ts/src/index.ts @@ -1,6 +1,7 @@ export { Crs, GrumpkinCrs } from './crs/index.js'; export { type BackendOptions, + BackendType, Barretenberg, BarretenbergSync, UltraHonkVerifierBackend, @@ -9,5 +10,5 @@ export { } from './barretenberg/index.js'; export { randomBytes } from './random/index.js'; -export { RawBuffer, Fr } from './types/index.js'; +export { Fr } from './types/index.js'; export { splitHonkProof, reconstructHonkProof, deflattenFields, type ProofData } from './proof/index.js'; diff --git a/barretenberg/ts/src/main.ts b/barretenberg/ts/src/main.ts deleted file mode 100755 index 5aebedc74bac..000000000000 --- a/barretenberg/ts/src/main.ts +++ /dev/null @@ -1,534 +0,0 @@ -#!/usr/bin/env node -import 'source-map-support/register.js'; -import { Crs, Barretenberg, RawBuffer } from './index.js'; -import { createDebugLogger, initLogger } from './log/index.js'; -import { readFileSync, writeFileSync } from 'fs'; -import { gunzipSync } from 'zlib'; -import { Command } from 'commander'; -import { UltraHonkBackendOptions } from './barretenberg/backend.js'; - -let debug: (msg: string) => void; - -const threads = +process.env.HARDWARE_CONCURRENCY! || undefined; - -function getBytecode(bytecodePath: string): Uint8Array { - const extension = bytecodePath.substring(bytecodePath.lastIndexOf('.') + 1); - - if (extension == 'json') { - const encodedCircuit = JSON.parse(readFileSync(bytecodePath, 'utf8')); - const decompressed = gunzipSync(Buffer.from(encodedCircuit.bytecode, 'base64')); - return Uint8Array.from(decompressed); - } - - const encodedCircuit = readFileSync(bytecodePath); - const decompressed = gunzipSync(encodedCircuit); - return Uint8Array.from(decompressed); -} - -// TODO(https://github.com/AztecProtocol/barretenberg/issues/1126): split this into separate Plonk and Honk functions as their gate count differs -async function getGatesUltra(bytecodePath: string, recursive: boolean, honkRecursion: boolean, api: Barretenberg) { - const { total } = await computeCircuitSize(bytecodePath, recursive, honkRecursion, api); - return total; -} - -function getWitness(witnessPath: string): Uint8Array { - const data = readFileSync(witnessPath); - const decompressed = gunzipSync(data); - return Uint8Array.from(decompressed); -} - -async function computeCircuitSize(bytecodePath: string, recursive: boolean, honkRecursion: boolean, api: Barretenberg) { - debug(`Computing circuit size for ${bytecodePath}`); - const bytecode = getBytecode(bytecodePath); - const [total, subgroup] = await api.acirGetCircuitSizes(bytecode, recursive, honkRecursion); - return { total, subgroup }; -} - -async function initUltraHonk(bytecodePath: string, crsPath: string) { - const api = await Barretenberg.new({ - threads, - }); - - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1248): Get rid of this call to avoid building the circuit twice. 
- // TODO(https://github.com/AztecProtocol/barretenberg/issues/1126): use specific UltraHonk function - // recursive here is useless for UH, as it does not affect anything - const circuitSize = await getGatesUltra(bytecodePath, /*recursive=*/ false, /*honkRecursion=*/ true, api); - // TODO(https://github.com/AztecProtocol/barretenberg/issues/811): remove subgroupSizeOverride hack for goblin - const dyadicCircuitSize = Math.pow(2, Math.ceil(Math.log2(circuitSize))); - - debug(`Loading CRS for UltraHonk with circuit-size=${circuitSize} dyadic-circuit-size=${dyadicCircuitSize}`); - const crs = await Crs.new(dyadicCircuitSize + 1, crsPath); - - // Load CRS into wasm global CRS state. - // TODO: Make RawBuffer be default behavior, and have a specific Vector type for when wanting length prefixed. - await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); - return { api, circuitSize, dyadicCircuitSize }; -} - -async function initLite(crsPath: string) { - const api = await Barretenberg.new({ threads: 1 }); - - // Plus 1 needed! (Move +1 into Crs?) - const crs = await Crs.new(1, crsPath); - - // Load CRS into wasm global CRS state. - await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); - - return { api }; -} - -export async function proveAndVerifyUltraHonk(bytecodePath: string, witnessPath: string, crsPath: string) { - /* eslint-disable camelcase */ - const { api } = await initUltraHonk(bytecodePath, crsPath); - try { - const bytecode = getBytecode(bytecodePath); - const witness = getWitness(witnessPath); - - const verified = await api.acirProveAndVerifyUltraHonk(bytecode, witness); - return verified; - } finally { - await api.destroy(); - } - /* eslint-enable camelcase */ -} - -export async function proveAndVerifyMegaHonk(bytecodePath: string, witnessPath: string, crsPath: string) { - /* eslint-disable camelcase */ - const { api } = await initUltraHonk(bytecodePath, crsPath); - try { - const bytecode = getBytecode(bytecodePath); - const witness = getWitness(witnessPath); - - const verified = await api.acirProveAndVerifyMegaHonk(bytecode, witness); - return verified; - } finally { - await api.destroy(); - } - /* eslint-enable camelcase */ -} - -export async function gateCountUltra(bytecodePath: string, recursive: boolean, honkRecursion: boolean) { - const api = await Barretenberg.new({ threads: 1 }); - try { - const numberOfGates = await getGatesUltra(bytecodePath, recursive, honkRecursion, api); - debug(`Number of gates: ${numberOfGates}`); - // Create an 8-byte buffer and write the number into it. - // Writing number directly to stdout will result in a variable sized - // input depending on the size. 
- const buffer = Buffer.alloc(8); - buffer.writeBigInt64LE(BigInt(numberOfGates)); - - process.stdout.write(Uint8Array.from(buffer)); - } finally { - await api.destroy(); - } -} - -export async function contractUltraHonk(bytecodePath: string, vkPath: string, crsPath: string, outputPath: string) { - const { api } = await initUltraHonk(bytecodePath, crsPath); - try { - debug(`Creating UltraHonk verifier contract bytecode=${bytecodePath} vk=${vkPath}`); - const bytecode = getBytecode(bytecodePath); - const vk = new RawBuffer(readFileSync(vkPath)); - const contract = await api.acirHonkSolidityVerifier(bytecode, vk); - - if (outputPath === '-') { - process.stdout.write(contract); - debug(`Solidity verifier contract written to stdout`); - } else { - writeFileSync(outputPath, contract); - debug(`Solidity verifier contract written to ${outputPath}`); - } - } finally { - await api.destroy(); - } -} - -export async function proveUltraHonk( - bytecodePath: string, - witnessPath: string, - crsPath: string, - vkPath: string, - outputPath: string, - options?: UltraHonkBackendOptions, -) { - const { api } = await initUltraHonk(bytecodePath, crsPath); - try { - debug(`Creating UltraHonk proof bytecode=${bytecodePath}`); - const bytecode = getBytecode(bytecodePath); - const witness = getWitness(witnessPath); - - const acirProveUltraHonk = options?.keccak - ? api.acirProveUltraKeccakHonk.bind(api) - : options?.keccakZK - ? api.acirProveUltraKeccakZkHonk.bind(api) - : options?.starknet - ? api.acirProveUltraStarknetHonk.bind(api) - : options?.starknetZK - ? api.acirProveUltraStarknetZkHonk.bind(api) - : api.acirProveUltraZKHonk.bind(api); - const proof = await acirProveUltraHonk(bytecode, witness, new RawBuffer(readFileSync(vkPath))); - - if (outputPath === '-') { - process.stdout.write(proof); - debug(`Proof written to stdout`); - } else { - writeFileSync(outputPath, proof); - debug(`Proof written to ${outputPath}`); - } - } finally { - await api.destroy(); - } -} - -export async function writeVkUltraHonk( - bytecodePath: string, - crsPath: string, - outputPath: string, - options?: UltraHonkBackendOptions, -) { - const { api } = await initUltraHonk(bytecodePath, crsPath); - try { - const bytecode = getBytecode(bytecodePath); - debug(`Initializing UltraHonk verification key bytecode=${bytecodePath}`); - - const acirWriteVkUltraHonk = options?.keccak - ? api.acirWriteVkUltraKeccakHonk.bind(api) - : options?.keccakZK - ? api.acirWriteVkUltraKeccakZkHonk.bind(api) - : options?.starknet - ? api.acirWriteVkUltraStarknetHonk.bind(api) - : options?.starknetZK - ? api.acirWriteVkUltraStarknetZkHonk.bind(api) - : api.acirWriteVkUltraHonk.bind(api); - const vk = await acirWriteVkUltraHonk(bytecode); - - if (outputPath === '-') { - process.stdout.write(vk); - debug(`Verification key written to stdout`); - } else { - writeFileSync(outputPath, vk); - debug(`Verification key written to ${outputPath}`); - } - } finally { - await api.destroy(); - } -} - -export async function verifyUltraHonk( - proofPath: string, - vkPath: string, - crsPath: string, - options?: UltraHonkBackendOptions, -) { - const { api } = await initLite(crsPath); - try { - const acirVerifyUltraHonk = options?.keccak - ? api.acirVerifyUltraKeccakHonk.bind(api) - : options?.keccakZK - ? api.acirVerifyUltraKeccakZkHonk.bind(api) - : options?.starknet - ? api.acirVerifyUltraStarknetHonk.bind(api) - : options?.starknetZK - ? 
api.acirVerifyUltraStarknetZkHonk.bind(api) - : api.acirVerifyUltraZKHonk.bind(api); - const verified = await acirVerifyUltraHonk( - Uint8Array.from(readFileSync(proofPath)), - new RawBuffer(readFileSync(vkPath)), - ); - - debug(`Verification ${verified ? 'successful' : 'failed'}`); - return verified; - } finally { - await api.destroy(); - } -} - -export async function proofAsFieldsUltraHonk(proofPath: string, outputPath: string, crsPath: string) { - const { api } = await initLite(crsPath); - try { - debug(`Outputting UltraHonk proof as vector of fields proof=${proofPath}`); - const proofAsFields = await api.acirProofAsFieldsUltraHonk(Uint8Array.from(readFileSync(proofPath))); - const jsonProofAsFields = JSON.stringify(proofAsFields.map(f => f.toString())); - - if (outputPath === '-') { - process.stdout.write(jsonProofAsFields); - debug(`Proof as fields written to stdout`); - } else { - writeFileSync(outputPath, jsonProofAsFields); - debug(`Proof as fields written to ${outputPath}`); - } - } finally { - await api.destroy(); - } -} - -export async function vkAsFieldsUltraHonk(vkPath: string, vkeyOutputPath: string, crsPath: string) { - const { api } = await initLite(crsPath); - - try { - debug(`Serializing vk byte array into field elements vk=${vkPath}`); - const vkAsFields = await api.acirVkAsFieldsUltraHonk(new RawBuffer(readFileSync(vkPath))); - const jsonVKAsFields = JSON.stringify(vkAsFields.map(f => f.toString())); - - if (vkeyOutputPath === '-') { - process.stdout.write(jsonVKAsFields); - debug(`Verification key as fields written to stdout`); - } else { - writeFileSync(vkeyOutputPath, jsonVKAsFields); - debug(`Verification key as fields written to ${vkeyOutputPath}`); - } - } finally { - await api.destroy(); - } -} - -const program = new Command('bb'); - -program.option('-v, --verbose', 'enable verbose logging', false); -program.option('-c, --crs-path ', 'set crs path', './crs'); - -function handleGlobalOptions() { - initLogger({ useStdErr: true, level: program.opts().verbose ? 'debug' : 'info' }); - debug = createDebugLogger('bb'); - return { crsPath: program.opts().crsPath }; -} - -const deprecatedCommandError = () => async () => { - console.error( - `Error: UltraPlonk is now deprecated (see https://github.com/AztecProtocol/barretenberg/issues/1377). Use UltraHonk!`, - ); - process.exit(1); -}; - -program - .command('prove_and_verify') - .description('Generate a proof and verify it. Process exits with success or failure code. [DEPRECATED]') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') - .option('-r, --recursive', 'Whether to use a SNARK friendly proof', false) - .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') - .action(deprecatedCommandError()); - -program - .command('prove_and_verify_ultra_honk') - .description('Generate an UltraHonk proof and verify it. Process exits with success or failure code.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') - .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') - .action(async ({ bytecodePath, witnessPath }) => { - const { crsPath } = handleGlobalOptions(); - const result = await proveAndVerifyUltraHonk(bytecodePath, witnessPath, crsPath); - process.exit(result ? 0 : 1); - }); - -program - .command('prove_and_verify_mega_honk') - .description('Generate a MegaHonk proof and verify it. 
Process exits with success or failure code.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') - .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') - .action(async ({ bytecodePath, witnessPath }) => { - const { crsPath } = handleGlobalOptions(); - const result = await proveAndVerifyMegaHonk(bytecodePath, witnessPath, crsPath); - process.exit(result ? 0 : 1); - }); - -program - .command('prove') - .description('Generate a proof and write it to a file. [DEPRECATED]') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') - .option('-r, --recursive', 'Create a SNARK friendly proof', false) - .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') - .option('-o, --output-path ', 'Specify the proof output path', './proofs/proof') - .action(deprecatedCommandError()); - -program - .command('gates') - .description('Print Ultra Builder gate count to standard output.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') - .option('-r, --recursive', 'Create a SNARK friendly proof', false) - .option('-hr, --honk-recursion', 'Specify whether to use UltraHonk recursion', false) - .action(async ({ bytecodePath, recursive, honkRecursion: honkRecursion }) => { - handleGlobalOptions(); - await gateCountUltra(bytecodePath, recursive, honkRecursion); - }); - -program - .command('verify') - .description('Verify a proof. Process exists with success or failure code. [DEPRECATED]') - .requiredOption('-p, --proof-path ', 'Specify the path to the proof') - .requiredOption('-k, --vk ', 'path to a verification key. avoids recomputation.') - .action(deprecatedCommandError()); - -program - .command('contract') - .description('Output solidity verification key contract. [DEPRECATED]') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') - .option('-o, --output-path ', 'Specify the path to write the contract', './target/contract.sol') - .requiredOption('-k, --vk-path ', 'Path to a verification key. avoids recomputation.') - .action(deprecatedCommandError()); - -program - .command('contract_ultra_honk') - .description('Output solidity verification key contract.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') - .option('-o, --output-path ', 'Specify the path to write the contract', './target/contract.sol') - .requiredOption('-k, --vk-path ', 'Path to a verification key.') - .action(async ({ bytecodePath, outputPath, vkPath }) => { - const { crsPath } = handleGlobalOptions(); - await contractUltraHonk(bytecodePath, vkPath, crsPath, outputPath); - }); - -program - .command('write_vk') - .description('Output verification key. [DEPRECATED]') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') - .option('-r, --recursive', 'Create a SNARK friendly proof', false) - .option('-o, --output-path ', 'Specify the path to write the key') - .action(deprecatedCommandError()); - -program - .command('write_pk') - .description('Output proving key. [DEPRECATED]') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') - .option('-r, --recursive', 'Create a SNARK friendly proof', false) - .requiredOption('-o, --output-path ', 'Specify the path to write the key') - .action(deprecatedCommandError()); - -program - .command('proof_as_fields') - .description('Return the proof as fields elements. 
[DEPRECATED]') - .requiredOption('-p, --proof-path ', 'Specify the proof path') - .requiredOption('-k, --vk-path ', 'Path to verification key.') - .requiredOption('-o, --output-path ', 'Specify the JSON path to write the proof fields') - .action(deprecatedCommandError()); - -program - .command('vk_as_fields') - .description( - 'Return the verification key represented as fields elements. Also return the verification key hash. [DEPRECATED]', - ) - .requiredOption('-k, --vk-path ', 'Path to verification key.') - .requiredOption('-o, --output-path ', 'Specify the JSON path to write the verification key fields and key hash') - .action(deprecatedCommandError()); - -program - .command('prove_ultra_honk') - .description('Generate a proof and write it to a file.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') - .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') - .option('-k, --vk-path ', 'path to a verification key. avoids recomputation.') - .option('-o, --output-path ', 'Specify the proof output path', './proofs/proof') - .action(async ({ bytecodePath, witnessPath, vkPath, outputPath }) => { - const { crsPath } = handleGlobalOptions(); - debug(`Creating UltraHonk proof bytecodePath=${bytecodePath}, witnessPath=${witnessPath}, vkPath=${vkPath}`); - await proveUltraHonk(bytecodePath, witnessPath, crsPath, vkPath, outputPath); - }); - -program - .command('prove_ultra_keccak_honk') - .description('Generate a proof and write it to a file.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') - .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') - .option('-k, --vk-path ', 'path to a verification key. avoids recomputation.') - .option('-o, --output-path ', 'Specify the proof output path', './proofs/proof') - .action(async ({ bytecodePath, witnessPath, vkPath, outputPath }) => { - const { crsPath } = handleGlobalOptions(); - await proveUltraHonk(bytecodePath, witnessPath, crsPath, vkPath, outputPath, { keccak: true }); - }); - -program - .command('prove_ultra_starknet_honk') - .description('Generate a proof and write it to a file.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') - .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') - .option('-k, --vk-path ', 'path to a verification key. 
avoids recomputation.') - .option('-o, --output-path ', 'Specify the proof output path', './proofs/proof') - .action(async ({ bytecodePath, witnessPath, vkPath, outputPath }) => { - const { crsPath } = handleGlobalOptions(); - await proveUltraHonk(bytecodePath, witnessPath, crsPath, vkPath, outputPath, { starknet: true }); - }); - -program - .command('write_vk_ultra_honk') - .description('Output verification key.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') - .requiredOption('-o, --output-path ', 'Specify the path to write the key') - .action(async ({ bytecodePath, outputPath }) => { - const { crsPath } = handleGlobalOptions(); - debug(`Writing verification key to ${outputPath}`); - await writeVkUltraHonk(bytecodePath, crsPath, outputPath); - }); - -program - .command('write_vk_ultra_keccak_honk') - .description('Output verification key.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') - .requiredOption('-o, --output-path ', 'Specify the path to write the key') - .action(async ({ bytecodePath, outputPath }) => { - const { crsPath } = handleGlobalOptions(); - await writeVkUltraHonk(bytecodePath, crsPath, outputPath, { keccak: true }); - }); - -program - .command('write_vk_ultra_starknet_honk') - .description('Output verification key.') - .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') - .requiredOption('-o, --output-path ', 'Specify the path to write the key') - .action(async ({ bytecodePath, outputPath }) => { - const { crsPath } = handleGlobalOptions(); - await writeVkUltraHonk(bytecodePath, crsPath, outputPath, { starknet: true }); - }); - -program - .command('verify_ultra_honk') - .description('Verify a proof. Process exists with success or failure code.') - .requiredOption('-p, --proof-path ', 'Specify the path to the proof') - .requiredOption('-k, --vk-path ', 'path to a verification key. avoids recomputation.') - .action(async ({ proofPath, vkPath }) => { - const { crsPath } = handleGlobalOptions(); - const result = await verifyUltraHonk(proofPath, vkPath, crsPath); - process.exit(result ? 0 : 1); - }); - -program - .command('verify_ultra_keccak_honk') - .description('Verify a proof. Process exists with success or failure code.') - .requiredOption('-p, --proof-path ', 'Specify the path to the proof') - .requiredOption('-k, --vk-path ', 'path to a verification key. avoids recomputation.') - .action(async ({ proofPath, vkPath }) => { - const { crsPath } = handleGlobalOptions(); - const result = await verifyUltraHonk(proofPath, vkPath, crsPath, { keccak: true }); - process.exit(result ? 0 : 1); - }); - -program - .command('verify_ultra_starknet_honk') - .description('Verify a proof. Process exists with success or failure code.') - .requiredOption('-p, --proof-path ', 'Specify the path to the proof') - .requiredOption('-k, --vk ', 'path to a verification key. avoids recomputation.') - .action(async ({ proofPath, vk }) => { - const { crsPath } = handleGlobalOptions(); - const result = await verifyUltraHonk(proofPath, vk, crsPath, { starknet: true }); - process.exit(result ? 
0 : 1); - }); - -program - .command('proof_as_fields_honk') - .description('Return the proof as fields elements') - .requiredOption('-p, --proof-path ', 'Specify the proof path') - .requiredOption('-o, --output-path ', 'Specify the JSON path to write the proof fields') - .action(async ({ proofPath, outputPath }) => { - const { crsPath } = handleGlobalOptions(); - await proofAsFieldsUltraHonk(proofPath, outputPath, crsPath); - }); - -program - .command('vk_as_fields_ultra_honk') - .description('Return the verification key represented as fields elements.') - .requiredOption('-k, --vk-path ', 'Path to verification key.') - .requiredOption('-o, --output-path ', 'Specify the JSON path to write the verification key fields.') - .action(async ({ vkPath, outputPath }) => { - const { crsPath } = handleGlobalOptions(); - await vkAsFieldsUltraHonk(vkPath, outputPath, crsPath); - }); - -program.name('bb.js').parse(process.argv); diff --git a/barretenberg/ts/src/serialize/index.ts b/barretenberg/ts/src/serialize/index.ts index bdd49908abec..b91a12e66dd1 100644 --- a/barretenberg/ts/src/serialize/index.ts +++ b/barretenberg/ts/src/serialize/index.ts @@ -1,3 +1,2 @@ export * from './buffer_reader.js'; -export * from './output_type.js'; export * from './serialize.js'; diff --git a/barretenberg/ts/src/serialize/output_type.ts b/barretenberg/ts/src/serialize/output_type.ts deleted file mode 100644 index bb4f7778b402..000000000000 --- a/barretenberg/ts/src/serialize/output_type.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { BufferReader } from './buffer_reader.js'; - -export interface OutputType { - SIZE_IN_BYTES?: number; - fromBuffer: (b: Uint8Array | BufferReader) => T; -} - -export function BoolDeserializer(): OutputType { - return { - SIZE_IN_BYTES: 1, - fromBuffer: (buf: Uint8Array | BufferReader) => { - const reader = BufferReader.asReader(buf); - return reader.readBoolean(); - }, - }; -} - -export function NumberDeserializer(): OutputType { - return { - SIZE_IN_BYTES: 4, - fromBuffer: (buf: Uint8Array | BufferReader) => { - const reader = BufferReader.asReader(buf); - return reader.readNumber(); - }, - }; -} - -export function VectorDeserializer(t: OutputType): OutputType { - return { - fromBuffer: (buf: Uint8Array | BufferReader) => { - const reader = BufferReader.asReader(buf); - return reader.readVector(t); - }, - }; -} - -export function BufferDeserializer(): OutputType { - return { - fromBuffer: (buf: Uint8Array | BufferReader) => { - const reader = BufferReader.asReader(buf); - return reader.readBuffer(); - }, - }; -} - -export function StringDeserializer(): OutputType { - return { - fromBuffer: (buf: Uint8Array | BufferReader) => { - const reader = BufferReader.asReader(buf); - return reader.readString(); - }, - }; -} diff --git a/barretenberg/ts/src/serialize/serialize.ts b/barretenberg/ts/src/serialize/serialize.ts index b09caac6b06f..65c03e3ae547 100644 --- a/barretenberg/ts/src/serialize/serialize.ts +++ b/barretenberg/ts/src/serialize/serialize.ts @@ -1,5 +1,3 @@ -import { RawBuffer } from '../types/raw_buffer.js'; - // For serializing bool. export function boolToBuffer(b: boolean) { const buf = new Uint8Array(1); @@ -7,13 +5,6 @@ export function boolToBuffer(b: boolean) { return buf; } -// For serializing numbers to 32 bit little-endian form. -export function numToUInt32LE(n: number, bufferSize = 4) { - const buf = new Uint8Array(bufferSize); - new DataView(buf.buffer).setUint32(buf.byteLength - 4, n, true); - return buf; -} - // For serializing numbers to 32 bit big-endian form. 
export function numToUInt32BE(n: number, bufferSize = 4) { const buf = new Uint8Array(bufferSize); @@ -28,13 +19,6 @@ export function numToInt32BE(n: number, bufferSize = 4) { return buf; } -// For serializing numbers to 8 bit form. -export function numToUInt8(n: number) { - const buf = new Uint8Array(1); - buf[0] = n; - return buf; -} - export function concatenateUint8Arrays(arrayOfUint8Arrays: Uint8Array[]) { const totalLength = arrayOfUint8Arrays.reduce((prev, curr) => prev + curr.length, 0); const result = new Uint8Array(totalLength); @@ -46,10 +30,6 @@ export function concatenateUint8Arrays(arrayOfUint8Arrays: Uint8Array[]) { return result; } -export function uint8ArrayToHexString(uint8Array: Uint8Array) { - return uint8Array.reduce((accumulator, byte) => accumulator + byte.toString(16).padStart(2, '0'), ''); -} - // For serializing a buffer as a vector. export function serializeBufferToVector(buf: Uint8Array) { return concatenateUint8Arrays([numToInt32BE(buf.length), buf]); @@ -63,71 +43,11 @@ export function serializeBigInt(n: bigint, width = 32) { return buf; } -export function deserializeBigInt(buf: Uint8Array, offset = 0, width = 32) { - let result = 0n; - for (let i = 0; i < width; i++) { - result = (result << BigInt(8)) | BigInt(buf[offset + i]); - } - return { elem: result, adv: width }; -} - -export function serializeDate(date: Date) { - return serializeBigInt(BigInt(date.getTime()), 8); -} - -export function deserializeBufferFromVector(vector: Uint8Array, offset = 0) { - const length = new DataView(vector.buffer, vector.byteOffset + offset, 4).getUint32(0, false); - const adv = 4 + length; - const elem = vector.slice(offset + 4, offset + adv); - return { elem, adv }; -} - -export function deserializeBool(buf: Uint8Array, offset = 0) { - const adv = 1; - const elem = buf[offset] !== 0; - return { elem, adv }; -} - -export function deserializeUInt32(buf: Uint8Array, offset = 0) { - const adv = 4; - const elem = new DataView(buf.buffer, buf.byteOffset + offset, adv).getUint32(0, false); - return { elem, adv }; -} - -export function deserializeInt32(buf: Uint8Array, offset = 0) { - const adv = 4; - const elem = new DataView(buf.buffer, buf.byteOffset + offset, adv).getInt32(0, false); - return { elem, adv }; -} - -export function deserializeField(buf: Uint8Array, offset = 0) { - const adv = 32; - const elem = buf.slice(offset, offset + adv); - return { elem, adv }; -} - // For serializing an array of fixed length elements. export function serializeBufferArrayToVector(arr: Uint8Array[]) { return concatenateUint8Arrays([numToUInt32BE(arr.length), ...arr.flat()]); } -export function deserializeArrayFromVector( - deserialize: (buf: Uint8Array, offset: number) => { elem: T; adv: number }, - vector: Uint8Array, - offset = 0, -) { - let pos = offset; - const size = new DataView(vector.buffer, vector.byteOffset + pos, 4).getUint32(0, false); - pos += 4; - const arr = new Array(size); - for (let i = 0; i < size; ++i) { - const { elem, adv } = deserialize(vector, pos); - pos += adv; - arr[i] = elem; - } - return { elem: arr, adv: pos - offset }; -} - /** A type that can be written to a buffer. 
*/ export type Bufferable = boolean | Uint8Array | number | string | { toBuffer: () => Uint8Array } | Bufferable[]; @@ -139,8 +59,6 @@ export type Bufferable = boolean | Uint8Array | number | string | { toBuffer: () export function serializeBufferable(obj: Bufferable): Uint8Array { if (Array.isArray(obj)) { return serializeBufferArrayToVector(obj.map(serializeBufferable)); - } else if (obj instanceof RawBuffer) { - return obj; } else if (obj instanceof Uint8Array) { return serializeBufferToVector(obj); } else if (typeof obj === 'boolean') { diff --git a/barretenberg/ts/src/types/fields.ts b/barretenberg/ts/src/types/fields.ts index 509bc3adf8a4..184db9cfc359 100644 --- a/barretenberg/ts/src/types/fields.ts +++ b/barretenberg/ts/src/types/fields.ts @@ -5,7 +5,7 @@ import { bigIntToBufferBE, bigIntToUint8ArrayBE, } from '../bigint-array/index.js'; -import { BufferReader, uint8ArrayToHexString } from '../serialize/index.js'; +import { BufferReader } from '../serialize/index.js'; // TODO(#4189): Replace with implementation in yarn-project/foundation/src/fields/fields.ts /** @@ -61,7 +61,7 @@ export class Fr { } toString() { - return '0x' + uint8ArrayToHexString(this.toBuffer()); + return '0x' + this.toBuffer().reduce((accumulator, byte) => accumulator + byte.toString(16).padStart(2, '0'), ''); } equals(rhs: Fr) { diff --git a/barretenberg/ts/src/types/fixed_size_buffer.ts b/barretenberg/ts/src/types/fixed_size_buffer.ts deleted file mode 100644 index b020301cf1ef..000000000000 --- a/barretenberg/ts/src/types/fixed_size_buffer.ts +++ /dev/null @@ -1,59 +0,0 @@ -import { randomBytes } from '../random/index.js'; -import { BufferReader } from '../serialize/index.js'; - -export class Buffer32 { - static SIZE_IN_BYTES = 32; - - constructor(public readonly buffer: Uint8Array) {} - - static fromBuffer(buffer: Uint8Array | BufferReader) { - const reader = BufferReader.asReader(buffer); - return new Buffer32(reader.readBytes(this.SIZE_IN_BYTES)); - } - - static random() { - return new Buffer32(randomBytes(this.SIZE_IN_BYTES)); - } - - toBuffer() { - return this.buffer; - } -} - -export class Buffer64 { - static SIZE_IN_BYTES = 64; - - constructor(public readonly buffer: Uint8Array) {} - - static fromBuffer(buffer: Uint8Array | BufferReader) { - const reader = BufferReader.asReader(buffer); - return new Buffer64(reader.readBytes(this.SIZE_IN_BYTES)); - } - - static random() { - return new Buffer64(randomBytes(this.SIZE_IN_BYTES)); - } - - toBuffer() { - return this.buffer; - } -} - -export class Buffer128 { - static SIZE_IN_BYTES = 128; - - constructor(public readonly buffer: Uint8Array) {} - - static fromBuffer(buffer: Uint8Array | BufferReader) { - const reader = BufferReader.asReader(buffer); - return new Buffer128(reader.readBytes(this.SIZE_IN_BYTES)); - } - - static random() { - return new Buffer128(randomBytes(this.SIZE_IN_BYTES)); - } - - toBuffer() { - return this.buffer; - } -} diff --git a/barretenberg/ts/src/types/index.ts b/barretenberg/ts/src/types/index.ts index cc5bba551c3b..226f91159f6f 100644 --- a/barretenberg/ts/src/types/index.ts +++ b/barretenberg/ts/src/types/index.ts @@ -1,5 +1,2 @@ -export * from './ptr.js'; export * from './fields.js'; export * from './point.js'; -export * from './fixed_size_buffer.js'; -export * from './raw_buffer.js'; diff --git a/barretenberg/ts/src/types/ptr.ts b/barretenberg/ts/src/types/ptr.ts deleted file mode 100644 index 7f6e2687ce47..000000000000 --- a/barretenberg/ts/src/types/ptr.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { BufferReader } from 
'../serialize/index.js'; - -/** - * Holds an opaque pointer into WASM memory. - * Currently only 4 bytes, but could grow to 8 bytes with wasm64. - */ -export class Ptr { - static SIZE_IN_BYTES = 4; - - constructor(public readonly value: Uint8Array) {} - - static fromBuffer(buffer: Uint8Array | BufferReader) { - const reader = BufferReader.asReader(buffer); - return new this(reader.readBytes(this.SIZE_IN_BYTES)); - } - - toBuffer() { - return this.value; - } -} diff --git a/barretenberg/ts/src/types/raw_buffer.ts b/barretenberg/ts/src/types/raw_buffer.ts deleted file mode 100644 index de2696ee620c..000000000000 --- a/barretenberg/ts/src/types/raw_buffer.ts +++ /dev/null @@ -1,3 +0,0 @@ -// Used when the data is to be sent exactly as is. i.e. no length prefix will be added. -// This is useful for sending structured data that can be parsed-as-you-go, as opposed to just an array of bytes. -export class RawBuffer extends Uint8Array {} diff --git a/barretenberg/ts/tsconfig.browser.json b/barretenberg/ts/tsconfig.browser.json index 1f2fbbd4a569..abfcbc16a125 100644 --- a/barretenberg/ts/tsconfig.browser.json +++ b/barretenberg/ts/tsconfig.browser.json @@ -2,6 +2,7 @@ "extends": "./tsconfig.esm.json", "compilerOptions": { "outDir": "dest/browser", + "noEmit": false }, "include": ["src"], "exclude": ["src/main.ts", "src/**/*.test.ts", "src/bindgen", "src/benchmark"] diff --git a/barretenberg/ts/tsconfig.cjs.json b/barretenberg/ts/tsconfig.cjs.json index 8f49e5c3004f..5bad4d557468 100644 --- a/barretenberg/ts/tsconfig.cjs.json +++ b/barretenberg/ts/tsconfig.cjs.json @@ -3,6 +3,7 @@ "compilerOptions": { "module": "CommonJS", "outDir": "dest/node-cjs", - "tsBuildInfoFile": "/dev/null" + "tsBuildInfoFile": "/dev/null", + "noEmit": false } } diff --git a/barretenberg/ts/tsconfig.esm.json b/barretenberg/ts/tsconfig.esm.json index 343b0c7c116f..2fd1e0bfab1b 100644 --- a/barretenberg/ts/tsconfig.esm.json +++ b/barretenberg/ts/tsconfig.esm.json @@ -3,5 +3,6 @@ "compilerOptions": { "outDir": "dest/node", "tsBuildInfoFile": "/dev/null", + "noEmit": false, } } diff --git a/barretenberg/ts/tsconfig.json b/barretenberg/ts/tsconfig.json index 29d15ff06ab6..57961deb2f9e 100644 --- a/barretenberg/ts/tsconfig.json +++ b/barretenberg/ts/tsconfig.json @@ -16,6 +16,7 @@ "skipLibCheck": true, "composite": true, "rootDir": "src", + "noEmit": true, }, "include": ["src"] } diff --git a/yarn-project/bb-prover/src/bb/execute.ts b/yarn-project/bb-prover/src/bb/execute.ts index 3062d4041573..3a31eb99eca6 100644 --- a/yarn-project/bb-prover/src/bb/execute.ts +++ b/yarn-project/bb-prover/src/bb/execute.ts @@ -2,6 +2,7 @@ import { sha256 } from '@aztec/foundation/crypto'; import type { LogFn, Logger } from '@aztec/foundation/log'; import { Timer } from '@aztec/foundation/timer'; import type { AvmCircuitInputs, AvmCircuitPublicInputs } from '@aztec/stdlib/avm'; +import { Barretenberg } from '@aztec/bb.js'; import * as proc from 'child_process'; import { promises as fs } from 'fs'; @@ -197,6 +198,47 @@ function getArgs(flavor: UltraHonkFlavor) { } } +/** + * Converts UltraHonkFlavor to proof settings for msgpack API + */ +function getProofSettingsFromFlavor(flavor: UltraHonkFlavor): { + ipaAccumulation: boolean; + oracleHashType: string; + disableZk: boolean; + optimizedSolidityVerifier: boolean; +} { + switch (flavor) { + case 'ultra_honk': + return { + ipaAccumulation: false, + oracleHashType: 'poseidon2', + disableZk: true, + optimizedSolidityVerifier: false, + }; + case 'ultra_keccak_honk': + return { + ipaAccumulation: false, 
+ oracleHashType: 'keccak', + disableZk: true, + optimizedSolidityVerifier: false, + }; + case 'ultra_starknet_honk': + return { + ipaAccumulation: false, + oracleHashType: 'starknet', + disableZk: true, + optimizedSolidityVerifier: false, + }; + case 'ultra_rollup_honk': + return { + ipaAccumulation: true, + oracleHashType: 'poseidon2', + disableZk: true, + optimizedSolidityVerifier: false, + }; + } +} + /** * Used for generating proofs of noir circuits. * It is assumed that the working directory is a temporary and/or random directory used solely for generating this proof. @@ -395,6 +437,80 @@ export async function verifyProof( ); } +/** + * POC: Verifies proofs using bb.js msgpack API instead of CLI + * @param proofFullPath - The full path to the proof to be verified + * @param verificationKeyPath - The full path to the circuit verification key + * @param ultraHonkFlavor - The flavor of the proof + * @param log - A logging function + * @returns An object containing a result indication and duration taken + */ +export async function verifyProofMsgpack( + proofFullPath: string, + verificationKeyPath: string, + ultraHonkFlavor: UltraHonkFlavor, + log: Logger, +): Promise { + try { + log.verbose('bb-prover (verify) using msgpack API'); + + // Read proof, verification key, and public inputs from disk + const proofDir = proofFullPath.substring(0, proofFullPath.lastIndexOf('/')); + const publicInputsFullPath = join(proofDir, PUBLIC_INPUTS_FILENAME); + log.debug(`public inputs path: ${publicInputsFullPath}`); + + const [proofBuffer, vkBuffer, publicInputsBuffer] = await Promise.all([ + fs.readFile(proofFullPath), + fs.readFile(verificationKeyPath), + fs.readFile(publicInputsFullPath), + ]); + + // Convert proof buffer to array of 32-byte field elements + const proof: Uint8Array[] = []; + for (let i = 0; i < proofBuffer.length; i += 32) { + proof.push(new Uint8Array(proofBuffer.subarray(i, i + 32))); + } + + // Convert public inputs buffer to array of 32-byte field elements + const publicInputs: Uint8Array[] = []; + for (let i = 0; i < publicInputsBuffer.length; i += 32) { + publicInputs.push(new Uint8Array(publicInputsBuffer.subarray(i, i + 32))); + } + + // Create Barretenberg instance with native backend + const api = await Barretenberg.new({ threads: 1 }); + + try { + const timer = new Timer(); + + // Call circuitVerify via msgpack API + const { verified } = await api.circuitVerify({ + verificationKey: new Uint8Array(vkBuffer), + publicInputs, + proof, + settings: getProofSettingsFromFlavor(ultraHonkFlavor), + }); + + const duration = timer.ms(); + + if (verified) { + log.verbose(`Proof verified successfully via msgpack API in ${duration}ms`); + return { status: BB_RESULT.SUCCESS, durationMs: duration }; + } else { + return { + status: BB_RESULT.FAILURE, + reason: 'Proof verification failed via msgpack API', + }; + } + } finally { + // Always clean up the API instance + await api.destroy(); + } + } catch (error) { + return { status: BB_RESULT.FAILURE, reason: `${error}` }; + } +} + export async function verifyAvmProof( pathToBB: string, workingDirectory: string, diff --git a/yarn-project/bb-prover/src/bb/msgpack_api.test.ts b/yarn-project/bb-prover/src/bb/msgpack_api.test.ts new file mode 100644 index 000000000000..6f00539b8eb4 --- /dev/null +++ b/yarn-project/bb-prover/src/bb/msgpack_api.test.ts @@ -0,0 +1,150 @@ +import { describe, expect, it } from '@jest/globals'; +import { Fr } from '@aztec/foundation/fields'; +import { Proof } from '@aztec/stdlib/proofs'; + +describe('BBMsgpackProver 
buffer conversions', () => { + describe('toMsgpackProof format', () => { + it('should split proof into public inputs and proof fields correctly', () => { + // Create a mock proof with 3 public inputs and 5 proof fields + const numPublicInputs = 3; + const publicInputsBuffer = Buffer.alloc(numPublicInputs * 32); + const proofBuffer = Buffer.alloc(5 * 32); + + // Fill with test data + for (let i = 0; i < numPublicInputs; i++) { + publicInputsBuffer.writeUInt32BE(i + 1, i * 32); + } + for (let i = 0; i < 5; i++) { + proofBuffer.writeUInt32BE(i + 10, i * 32); + } + + const fullBuffer = Buffer.concat([publicInputsBuffer, proofBuffer]); + const proof = new Proof(fullBuffer, numPublicInputs); + + // Manually implement toMsgpackProof logic for testing + const publicInputsSize = proof.numPublicInputs * 32; + const extractedPublicInputs = proof.buffer.subarray(0, publicInputsSize); + const extractedProof = proof.buffer.subarray(publicInputsSize); + + // Verify sizes + expect(extractedPublicInputs.length).toBe(numPublicInputs * 32); + expect(extractedProof.length).toBe(5 * 32); + + // Verify public inputs can be split into 32-byte chunks + const publicInputFields: Uint8Array[] = []; + for (let i = 0; i < extractedPublicInputs.length; i += 32) { + publicInputFields.push(new Uint8Array(extractedPublicInputs.subarray(i, i + 32))); + } + expect(publicInputFields.length).toBe(numPublicInputs); + + // Verify proof fields can be split into 32-byte chunks + const proofFields: Uint8Array[] = []; + for (let i = 0; i < extractedProof.length; i += 32) { + proofFields.push(new Uint8Array(extractedProof.subarray(i, i + 32))); + } + expect(proofFields.length).toBe(5); + }); + }); + + describe('fromMsgpackProof format', () => { + it('should reconstruct proof from field arrays correctly', () => { + // Create mock proof and public input fields + const proofFields: Uint8Array[] = []; + const publicInputFields: Uint8Array[] = []; + + for (let i = 0; i < 3; i++) { + const field = Buffer.alloc(32); + field.writeUInt32BE(i + 1, 0); + publicInputFields.push(new Uint8Array(field)); + } + + for (let i = 0; i < 5; i++) { + const field = Buffer.alloc(32); + field.writeUInt32BE(i + 10, 0); + proofFields.push(new Uint8Array(field)); + } + + // Reconstruct binary proof in Aztec format: [public_inputs, proof] + const publicInputsBuffer = Buffer.concat(publicInputFields.map(f => Buffer.from(f))); + const proofBuffer = Buffer.concat(proofFields.map(f => Buffer.from(f))); + const binaryProofWithPublicInputs = Buffer.concat([publicInputsBuffer, proofBuffer]); + + // Verify total size + expect(binaryProofWithPublicInputs.length).toBe((3 + 5) * 32); + + // Verify we can create a Proof object with correct public inputs count + const proof = new Proof(binaryProofWithPublicInputs, publicInputFields.length); + expect(proof.numPublicInputs).toBe(3); + expect(proof.buffer.length).toBe((3 + 5) * 32); + }); + + it('should convert field buffers to Fr array correctly', () => { + const proofFields: Uint8Array[] = []; + + for (let i = 0; i < 5; i++) { + const field = Buffer.alloc(32); + field.writeUInt32BE(i + 1, 28); // Write at offset 28 to place value at end + proofFields.push(new Uint8Array(field)); + } + + // Convert to Fr array (simulating fromMsgpackProof logic) + const proofFrs: Fr[] = []; + for (const field of proofFields) { + proofFrs.push(Fr.fromBuffer(Buffer.from(field))); + } + + expect(proofFrs.length).toBe(5); + // Verify each Fr can be converted to buffer and back + proofFrs.forEach((fr, i) => { + 
expect(fr.toBuffer().length).toBe(32); + }); + }); + }); + + describe('round-trip conversion', () => { + it('should preserve proof data through to/from msgpack conversion', () => { + // Create original proof + const numPublicInputs = 2; + const publicInputsBuffer = Buffer.alloc(numPublicInputs * 32); + const proofBuffer = Buffer.alloc(3 * 32); + + // Fill with test pattern + for (let i = 0; i < numPublicInputs * 32; i++) { + publicInputsBuffer[i] = i % 256; + } + for (let i = 0; i < 3 * 32; i++) { + proofBuffer[i] = (i + 100) % 256; + } + + const originalBuffer = Buffer.concat([publicInputsBuffer, proofBuffer]); + const originalProof = new Proof(originalBuffer, numPublicInputs); + + // Simulate toMsgpackProof + const publicInputsSize = originalProof.numPublicInputs * 32; + const publicInputsExtracted = originalProof.buffer.subarray(0, publicInputsSize); + const proofExtracted = originalProof.buffer.subarray(publicInputsSize); + + const publicInputFields: Uint8Array[] = []; + for (let i = 0; i < publicInputsExtracted.length; i += 32) { + publicInputFields.push(new Uint8Array(publicInputsExtracted.subarray(i, i + 32))); + } + + const proofFields: Uint8Array[] = []; + for (let i = 0; i < proofExtracted.length; i += 32) { + proofFields.push(new Uint8Array(proofExtracted.subarray(i, i + 32))); + } + + // Simulate fromMsgpackProof + const reconstructedPublicInputs = Buffer.concat(publicInputFields.map(f => Buffer.from(f))); + const reconstructedProof = Buffer.concat(proofFields.map(f => Buffer.from(f))); + const reconstructedBuffer = Buffer.concat([reconstructedPublicInputs, reconstructedProof]); + + const reconstructedProofObj = new Proof(reconstructedBuffer, publicInputFields.length); + + // Verify round-trip preserves data + expect(reconstructedProofObj.buffer.length).toBe(originalProof.buffer.length); + expect(reconstructedProofObj.numPublicInputs).toBe(originalProof.numPublicInputs); + expect(reconstructedProofObj.buffer.equals(originalProof.buffer)).toBe(true); + }); + }); +}); diff --git a/yarn-project/bb-prover/src/bb/msgpack_api.ts b/yarn-project/bb-prover/src/bb/msgpack_api.ts new file mode 100644 index 000000000000..2134a861ba53 --- /dev/null +++ b/yarn-project/bb-prover/src/bb/msgpack_api.ts @@ -0,0 +1,200 @@ +import type { Barretenberg } from '@aztec/bb.js'; +import { Fr } from '@aztec/foundation/fields'; +import type { Logger } from '@aztec/foundation/log'; +import { Proof, RecursiveProof } from '@aztec/stdlib/proofs'; +import type { VerificationKeyData } from '@aztec/stdlib/vks'; + +import type { UltraHonkFlavor } from '../honk.js'; + +/** + * Converts UltraHonkFlavor to proof settings for msgpack API + */ +function getProofSettingsFromFlavor(flavor: UltraHonkFlavor): { + ipaAccumulation: boolean; + oracleHashType: string; + disableZk: boolean; + optimizedSolidityVerifier: boolean; +} { + switch (flavor) { + case 'ultra_honk': + return { + ipaAccumulation: false, + oracleHashType: 'poseidon2', + disableZk: true, + optimizedSolidityVerifier: false, + }; + case 'ultra_keccak_honk': + return { + ipaAccumulation: false, + oracleHashType: 'keccak', + disableZk: true, + optimizedSolidityVerifier: false, + }; + case 'ultra_starknet_honk': + return { + ipaAccumulation: false, + oracleHashType: 'starknet', + disableZk: true, + optimizedSolidityVerifier: false, + }; + case 'ultra_rollup_honk': + return { + ipaAccumulation: true, + oracleHashType: 'poseidon2', + disableZk: true, + optimizedSolidityVerifier: false, + }; + } +} + +/** + * Wrapper around bb.js msgpack API for Aztec protocol 
circuit proving.
+ * Handles buffer conversions and proof format translations.
+ * Eliminates all file I/O - operates entirely on in-memory buffers.
+ */
+export class BBMsgpackProver {
+  constructor(
+    private api: Barretenberg,
+    private logger: Logger,
+  ) {}
+
+  /**
+   * Generates a proof for a circuit using the msgpack API.
+   * All operations happen in memory - no file I/O.
+   *
+   * @param witness - Witness data as buffer (from ACVM)
+   * @param bytecode - Circuit bytecode
+   * @param verificationKey - Verification key bytes
+   * @param circuitName - Name of the circuit for logging
+   * @param flavor - UltraHonk flavor for proof settings
+   * @param proofLength - Expected proof length for validation
+   * @param vkData - Verification key metadata (for public inputs count)
+   * @returns Recursive proof in Aztec format
+   */
+  async proveCircuit<PROOF_LENGTH extends number>(
+    witness: Uint8Array,
+    bytecode: Uint8Array,
+    verificationKey: Uint8Array,
+    circuitName: string,
+    flavor: UltraHonkFlavor,
+    proofLength: PROOF_LENGTH,
+    vkData: VerificationKeyData,
+  ): Promise<RecursiveProof<PROOF_LENGTH>> {
+    this.logger.debug(`Proving ${circuitName} via msgpack API...`);
+
+    // Call msgpack API - all in memory!
+    const { proof, publicInputs } = await this.api.circuitProve({
+      witness,
+      circuit: {
+        name: circuitName,
+        bytecode: Buffer.from(bytecode),
+        verificationKey: Buffer.from(verificationKey),
+      },
+      settings: getProofSettingsFromFlavor(flavor),
+    });
+
+    this.logger.debug(
+      `Proof generated via msgpack: ${proof.length} proof fields, ${publicInputs.length} public inputs`,
+    );
+
+    // Convert msgpack format to Aztec format
+    return this.fromMsgpackProof(proof, publicInputs, proofLength, vkData);
+  }
+
+  /**
+   * Verifies a proof using the msgpack API.
+   * All operations happen in memory - no file I/O.
+   *
+   * @param proof - Proof in Aztec format
+   * @param verificationKey - Verification key bytes
+   * @param flavor - UltraHonk flavor for proof settings
+   * @throws Error if verification fails
+   */
+  async verifyCircuit(proof: Proof, verificationKey: Uint8Array, flavor: UltraHonkFlavor): Promise<void> {
+    this.logger.debug(`Verifying proof via msgpack API...`);
+
+    // Convert Aztec proof format to msgpack format
+    const { proofFields, publicInputFields } = this.toMsgpackProof(proof);
+
+    // Call msgpack API - all in memory!
+    const { verified } = await this.api.circuitVerify({
+      verificationKey: Buffer.from(verificationKey),
+      publicInputs: publicInputFields,
+      proof: proofFields,
+      settings: getProofSettingsFromFlavor(flavor),
+    });
+
+    if (!verified) {
+      throw new Error('Proof verification failed via msgpack API');
+    }
+
+    this.logger.debug(`Proof verified successfully via msgpack API`);
+  }
+
+  /**
+   * Converts Aztec Proof format to msgpack format.
+   * Msgpack expects proof and public inputs as separate arrays of 32-byte field elements.
+   *
+   * @param proof - Proof in Aztec format (binary buffer with embedded public inputs)
+   * @returns Proof and public inputs as arrays of Uint8Array (32-byte chunks)
+   */
+  private toMsgpackProof(proof: Proof): {
+    proofFields: Uint8Array[];
+    publicInputFields: Uint8Array[];
+  } {
+    // Aztec Proof format: [public_inputs (numPublicInputs * 32 bytes), proof]
+    const publicInputsSize = proof.numPublicInputs * 32;
+    const publicInputsBuffer = proof.buffer.subarray(0, publicInputsSize);
+    const proofBuffer = proof.buffer.subarray(publicInputsSize);
+
+    // Convert to arrays of 32-byte field elements
+    const publicInputFields: Uint8Array[] = [];
+    for (let i = 0; i < publicInputsBuffer.length; i += 32) {
+      publicInputFields.push(new Uint8Array(publicInputsBuffer.subarray(i, i + 32)));
+    }
+
+    const proofFields: Uint8Array[] = [];
+    for (let i = 0; i < proofBuffer.length; i += 32) {
+      proofFields.push(new Uint8Array(proofBuffer.subarray(i, i + 32)));
+    }
+
+    return { proofFields, publicInputFields };
+  }
+
+  /**
+   * Converts msgpack proof format to Aztec RecursiveProof format.
+   * Msgpack returns proof and public inputs as separate arrays of 32-byte field elements.
+   *
+   * @param proofFields - Proof as array of 32-byte field elements
+   * @param publicInputFields - Public inputs as array of 32-byte field elements
+   * @param proofLength - Expected number of proof fields
+   * @param vkData - Verification key metadata (for public inputs count)
+   * @returns RecursiveProof in Aztec format
+   */
+  private fromMsgpackProof<PROOF_LENGTH extends number>(
+    proofFields: Uint8Array[],
+    publicInputFields: Uint8Array[],
+    proofLength: PROOF_LENGTH,
+    vkData: VerificationKeyData,
+  ): RecursiveProof<PROOF_LENGTH> {
+    // Convert field arrays to Fr arrays for RecursiveProof
+    const proofFrs: Fr[] = [];
+    for (const field of proofFields) {
+      proofFrs.push(Fr.fromBuffer(Buffer.from(field)));
+    }
+
+    // Validate proof length
+    if (proofFrs.length !== proofLength) {
+      throw new Error(`Proof length mismatch: expected ${proofLength}, got ${proofFrs.length}`);
+    }
+
+    // Create binary proof in Aztec format: [public_inputs, proof]
+    const publicInputsBuffer = Buffer.concat(publicInputFields.map(f => Buffer.from(f)));
+    const proofBuffer = Buffer.concat(proofFields.map(f => Buffer.from(f)));
+    const binaryProofWithPublicInputs = Buffer.concat([publicInputsBuffer, proofBuffer]);
+
+    const binaryProof = new Proof(binaryProofWithPublicInputs, publicInputFields.length);
+
+    return new RecursiveProof(proofFrs, binaryProof, true, proofLength);
+  }
+}
diff --git a/yarn-project/bb-prover/src/config.ts b/yarn-project/bb-prover/src/config.ts
index 60a33c9a67b6..3cda0a03a973 100644
--- a/yarn-project/bb-prover/src/config.ts
+++ b/yarn-project/bb-prover/src/config.ts
@@ -5,6 +5,8 @@ export interface BBConfig {
   bbSkipCleanup: boolean;
   numConcurrentIVCVerifiers: number;
   bbIVCConcurrency: number;
+  /** Number of threads to use for bb.js msgpack backend proving (defaults to 1) */
+  bbThreads?: number;
 }

 export interface ACVMConfig {
diff --git a/yarn-project/bb-prover/src/prover/server/bb_prover.ts b/yarn-project/bb-prover/src/prover/server/bb_prover.ts
index 2dd4a4c681fe..d26a1d3b184f 100644
--- a/yarn-project/bb-prover/src/prover/server/bb_prover.ts
+++ b/yarn-project/bb-prover/src/prover/server/bb_prover.ts
@@ -1,3 +1,4 @@
+import { Barretenberg } from '@aztec/bb.js';
 import {
   AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED,
   NESTED_RECURSIVE_PROOF_LENGTH,
@@ -101,15 +102,13 @@ import {
   PUBLIC_INPUTS_FILENAME,
   VK_FILENAME,
   generateAvmProof,
-  generateProof,
   verifyAvmProof,
-  verifyProof,
} from '../../bb/execute.js'; +import { BBMsgpackProver } from '../../bb/msgpack_api.js'; import type { ACVMConfig, BBConfig } from '../../config.js'; import { type UltraHonkFlavor, getUltraHonkFlavorForCircuit } from '../../honk.js'; import { ProverInstrumentation } from '../../instrumentation.js'; import { extractAvmVkData } from '../../verification_key/verification_key_data.js'; -import { readProofsFromOutputDirectory } from '../proof_utils.js'; const logger = createLogger('bb-prover'); @@ -123,6 +122,8 @@ export interface BBProverConfig extends BBConfig, ACVMConfig { */ export class BBNativeRollupProver implements ServerCircuitProver { private instrumentation: ProverInstrumentation; + private bbApi!: Barretenberg; + private bbMsgpackProver!: BBMsgpackProver; constructor( private config: BBProverConfig, @@ -143,7 +144,28 @@ export class BBNativeRollupProver implements ServerCircuitProver { logger.info(`Using native BB at ${config.bbBinaryPath} and working directory ${config.bbWorkingDirectory}`); logger.info(`Using native ACVM at ${config.acvmBinaryPath} and working directory ${config.acvmWorkingDirectory}`); - return new BBNativeRollupProver(config, telemetry); + const prover = new BBNativeRollupProver(config, telemetry); + + // Initialize bb.js msgpack native backend + logger.info(`Initializing bb.js msgpack backend with ${config.bbThreads || 1} threads...`); + prover.bbApi = await Barretenberg.new({ + threads: config.bbThreads || 1, + bbPath: config.bbBinaryPath, + }); + prover.bbMsgpackProver = new BBMsgpackProver(prover.bbApi, logger); + logger.info(`bb.js msgpack backend initialized successfully`); + + return prover; + } + + /** + * Cleanup resources - destroys the bb.js API instance + */ + async destroy() { + if (this.bbApi) { + logger.info('Destroying bb.js msgpack backend...'); + await this.bbApi.destroy(); + } } /** @@ -450,21 +472,24 @@ export class BBNativeRollupProver implements ServerCircuitProver { return makePublicInputsAndRecursiveProof(circuitOutput, proof, verificationKey); } - private async generateProofWithBB< + /** + * Generates a proof using bb.js msgpack API - NO FILE I/O for proving! + * ACVM still writes witness file (different binary), but BB proving happens entirely in memory. 
+ */ + private async generateProofWithBBMsgpack< + PROOF_LENGTH extends number, Input extends { toBuffer: () => Buffer }, Output extends { toBuffer: () => Buffer }, >( input: Input, circuitType: ServerProtocolArtifact, + proofLength: PROOF_LENGTH, convertInput: (input: Input) => WitnessMap, convertOutput: (outputWitness: WitnessMap) => Output, workingDirectory: string, - ): Promise<{ circuitOutput: Output; provingResult: BBSuccess }> { - // Have the ACVM write the partial witness here + ): Promise<{ circuitOutput: Output; proof: RecursiveProof; durationMs: number }> { + // Still need ACVM for witness generation (different binary) const outputWitnessFile = path.join(workingDirectory, 'partial-witness.gz'); - - // Generate the partial witness using the ACVM - // A further temp directory will be created beneath ours and then cleaned up after the partial witness has been copied to our specified location const simulator = new NativeACVMSimulator( this.config.acvmWorkingDirectory, this.config.acvmBinaryPath, @@ -472,11 +497,10 @@ export class BBNativeRollupProver implements ServerCircuitProver { ); const artifact = getServerCircuitArtifact(circuitType); - logger.debug(`Generating witness data for ${circuitType}`); const inputWitness = convertInput(input); - const foreignCallHandler = undefined; // We don't handle foreign calls in the native ACVM simulator + const foreignCallHandler = undefined; const witnessResult = await simulator.executeProtocolCircuit(inputWitness, artifact, foreignCallHandler); const output = convertOutput(witnessResult.witness); @@ -493,31 +517,72 @@ export class BBNativeRollupProver implements ServerCircuitProver { eventName: 'circuit-witness-generation', } satisfies CircuitWitnessGenerationStats); - // Now prove the circuit from the generated witness - logger.debug(`Proving ${circuitType}...`); + // Read witness buffer (last file I/O!) + const witnessBuffer = await fs.readFile(outputWitnessFile); - const provingResult = await generateProof( - this.config.bbBinaryPath, - workingDirectory, - circuitType, - Buffer.from(artifact.bytecode, 'base64'), - this.getVerificationKeyDataForCircuit(circuitType).keyAsBytes, - outputWitnessFile, - getUltraHonkFlavorForCircuit(circuitType), - logger, + // Get circuit data (already in memory) + const bytecode = Buffer.from(artifact.bytecode, 'base64'); + const vkData = this.getVerificationKeyDataForCircuit(circuitType); + const flavor = getUltraHonkFlavorForCircuit(circuitType); + + // Prove via msgpack API - ALL IN MEMORY! 
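+      // [Editor's illustrative sketch - not part of the original patch.] The proveCircuit call below is
+      // expected to hand bb.js three in-memory buffers and get back field-element arrays, roughly:
+      //
+      //   const { proof, publicInputs } = await bbApi.circuitProve({
+      //     witness: witnessBuffer,                         // gzipped ACVM output, read once from disk
+      //     circuit: { name, bytecode, verificationKey },   // all already held in memory
+      //     settings: getProofSettingsFromFlavor(flavor),
+      //   });
+      //
+      // `circuitProve` and the settings shape are the bb.js msgpack API assumed throughout this patch;
+      // nothing further is written to the working directory at this point.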
+    logger.debug(`Proving ${circuitType} via msgpack API...`);
+    const circuitName = mapProtocolArtifactNameToCircuitName(circuitType);
+    const startMs = Date.now();
+
+    const proof = await this.bbMsgpackProver.proveCircuit(
+      witnessBuffer,
+      bytecode,
+      vkData.keyAsBytes,
+      circuitName,
+      flavor,
+      proofLength,
+      vkData,
     );

-    if (provingResult.status === BB_RESULT.FAILURE) {
-      logger.error(`Failed to generate proof for ${circuitType}: ${provingResult.reason}`);
-      throw new ProvingError(provingResult.reason, provingResult, provingResult.retry);
-    }
+    const durationMs = Date.now() - startMs;
+
+    // Record metrics
+    this.instrumentation.recordDuration('provingDuration', circuitName, durationMs);
+    this.instrumentation.recordSize('proofSize', circuitName, proof.binaryProof.buffer.length);
+    this.instrumentation.recordSize('circuitPublicInputCount', circuitName, vkData.numPublicInputs);
+    this.instrumentation.recordSize('circuitSize', circuitName, vkData.circuitSize);
+
+    logger.info(
+      `Generated proof for ${circuitType} in ${Math.ceil(durationMs)} ms, size: ${proof.proof.length} fields`,
+      {
+        circuitName,
+        circuitSize: vkData.circuitSize,
+        duration: durationMs,
+        inputSize: output.toBuffer().length,
+        proofSize: proof.binaryProof.buffer.length,
+        eventName: 'circuit-proving',
+        numPublicInputs: vkData.numPublicInputs,
+      } satisfies CircuitProvingStats,
+    );

     return {
       circuitOutput: output,
-      provingResult,
+      proof,
+      durationMs,
     };
   }

+  /**
+   * Verifies a proof using bb.js msgpack API - NO FILE I/O!
+   */
+  private async verifyWithKeyMsgpack(
+    proof: Proof,
+    verificationKey: { keyAsBytes: Buffer },
+    flavor: UltraHonkFlavor,
+  ): Promise<void> {
+    // Verify via msgpack API - ALL IN MEMORY!
+    await this.bbMsgpackProver.verifyCircuit(proof, verificationKey.keyAsBytes, flavor);
+    logger.debug('Successfully verified proof via msgpack API');
+  }
+
+  /**
+   * Generates an AVM proof using the BB CLI (not msgpack - AVM API not available yet in bb.js)
+   */
   private async generateAvmProofWithBB(input: AvmCircuitInputs, workingDirectory: string): Promise<BBFailure | BBSuccess> {
     logger.info(`Proving avm-circuit for TX ${input.hints.tx.hash}...`);
@@ -531,6 +596,62 @@
     return provingResult;
   }

+  /**
+   * Reads an AVM proof from disk and converts it to RecursiveProof format.
+   */
+  private async readAvmProofAsFields(
+    proofFilename: string,
+  ): Promise<RecursiveProof<typeof AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED>> {
+    const rawProofBuffer = await fs.readFile(proofFilename);
+    const reader = BufferReader.asReader(rawProofBuffer);
+    const proofFields = reader.readArray(rawProofBuffer.length / Fr.SIZE_IN_BYTES, Fr);
+
+    // We extend to a fixed-size padded proof as during development any new AVM circuit column changes the
+    // proof length and we do not have a mechanism to feedback a cpp constant to noir/TS.
+    // TODO(#13390): Revive a non-padded AVM proof
+    if (proofFields.length > AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED) {
+      throw new Error(
+        `Proof has ${proofFields.length} fields, expected no more than ${AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED}.`,
+      );
+    }
+    const proofFieldsPadded = proofFields.concat(
+      Array(AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED - proofFields.length).fill(new Fr(0)),
+    );
+
+    const proof = new Proof(rawProofBuffer, /*numPublicInputs=*/ 0);
+    return new RecursiveProof(proofFieldsPadded, proof, true, AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED);
+  }
+
+  /**
+   * Internal helper for verification - writes proof/vk to temp files and calls BB CLI.
+ */ + private async verifyWithKeyInternal( + proof: Proof, + verificationKey: { keyAsBytes: Buffer }, + verificationFunction: (proofPath: string, vkPath: string) => Promise, + ) { + const operation = async (bbWorkingDirectory: string) => { + const publicInputsFileName = path.join(bbWorkingDirectory, PUBLIC_INPUTS_FILENAME); + const proofFileName = path.join(bbWorkingDirectory, PROOF_FILENAME); + const verificationKeyPath = path.join(bbWorkingDirectory, VK_FILENAME); + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/13189): Put this proof parsing logic in the proof class. + await fs.writeFile(publicInputsFileName, proof.buffer.slice(0, proof.numPublicInputs * 32)); + await fs.writeFile(proofFileName, proof.buffer.slice(proof.numPublicInputs * 32)); + await fs.writeFile(verificationKeyPath, verificationKey.keyAsBytes); + + const result = await verificationFunction(proofFileName, verificationKeyPath!); + + if (result.status === BB_RESULT.FAILURE) { + const errorMessage = `Failed to verify proof from key!`; + throw new ProvingError(errorMessage, result, result.retry); + } + + logger.info(`Successfully verified proof from key in ${result.durationMs} ms`); + }; + + await this.runInDirectory(operation); + } + private async createAvmProof( input: AvmCircuitInputs, ): Promise> { @@ -567,6 +688,7 @@ export class BBNativeRollupProver implements ServerCircuitProver { /** * Executes a circuit and returns its outputs and corresponding proof with embedded aggregation object + * NOW USES MSGPACK API - NO FILE I/O for proving! * @param witnessMap - The input witness * @param circuitType - The type of circuit to be executed * @param proofLength - The length of the proof to be generated. This is a dummy parameter to aid in type checking @@ -585,40 +707,17 @@ export class BBNativeRollupProver implements ServerCircuitProver { convertInput: (input: CircuitInputType) => WitnessMap, convertOutput: (outputWitness: WitnessMap) => CircuitOutputType, ): Promise<{ circuitOutput: CircuitOutputType; proof: RecursiveProof }> { - // this probably is gonna need to call client ivc + // Use msgpack API - eliminates file I/O for proving! 
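+    // [Editor's note - not part of the original patch.] The temp directory handed to this operation is
+    // retained only so the ACVM can write its partial witness; the bb proving step below no longer reads
+    // or writes anything in it.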
const operation = async (bbWorkingDirectory: string) => { - const { provingResult, circuitOutput: output } = await this.generateProofWithBB( + const { proof, circuitOutput: output } = await this.generateProofWithBBMsgpack( input, circuitType, + proofLength, convertInput, convertOutput, bbWorkingDirectory, ); - const vkData = this.getVerificationKeyDataForCircuit(circuitType); - // Read the proof as fields - const proof = await readProofsFromOutputDirectory(provingResult.proofPath!, vkData, proofLength, logger); - - const circuitName = mapProtocolArtifactNameToCircuitName(circuitType); - this.instrumentation.recordDuration('provingDuration', circuitName, provingResult.durationMs); - this.instrumentation.recordSize('proofSize', circuitName, proof.binaryProof.buffer.length); - this.instrumentation.recordSize('circuitPublicInputCount', circuitName, vkData.numPublicInputs); - this.instrumentation.recordSize('circuitSize', circuitName, vkData.circuitSize); - logger.info( - `Generated proof for ${circuitType} in ${Math.ceil(provingResult.durationMs)} ms, size: ${ - proof.proof.length - } fields`, - { - circuitName, - circuitSize: vkData.circuitSize, - duration: provingResult.durationMs, - inputSize: output.toBuffer().length, - proofSize: proof.binaryProof.buffer.length, - eventName: 'circuit-proving', - numPublicInputs: vkData.numPublicInputs, - } satisfies CircuitProvingStats, - ); - return { circuitOutput: output, proof, @@ -629,6 +728,7 @@ export class BBNativeRollupProver implements ServerCircuitProver { /** * Verifies a proof, will generate the verification key if one is not cached internally + * NOW USES MSGPACK API - NO FILE I/O! * @param circuitType - The type of circuit whose proof is to be verified * @param proof - The proof to be verified */ @@ -648,36 +748,8 @@ export class BBNativeRollupProver implements ServerCircuitProver { } public async verifyWithKey(flavor: UltraHonkFlavor, verificationKey: VerificationKeyData, proof: Proof) { - return await this.verifyWithKeyInternal(proof, verificationKey, (proofPath, vkPath) => - verifyProof(this.config.bbBinaryPath, proofPath, vkPath, flavor, logger), - ); - } - - private async verifyWithKeyInternal( - proof: Proof, - verificationKey: { keyAsBytes: Buffer }, - verificationFunction: (proofPath: string, vkPath: string) => Promise, - ) { - const operation = async (bbWorkingDirectory: string) => { - const publicInputsFileName = path.join(bbWorkingDirectory, PUBLIC_INPUTS_FILENAME); - const proofFileName = path.join(bbWorkingDirectory, PROOF_FILENAME); - const verificationKeyPath = path.join(bbWorkingDirectory, VK_FILENAME); - // TODO(https://github.com/AztecProtocol/aztec-packages/issues/13189): Put this proof parsing logic in the proof class. - await fs.writeFile(publicInputsFileName, proof.buffer.slice(0, proof.numPublicInputs * 32)); - await fs.writeFile(proofFileName, proof.buffer.slice(proof.numPublicInputs * 32)); - await fs.writeFile(verificationKeyPath, verificationKey.keyAsBytes); - - const result = await verificationFunction(proofFileName, verificationKeyPath!); - - if (result.status === BB_RESULT.FAILURE) { - const errorMessage = `Failed to verify proof from key!`; - throw new ProvingError(errorMessage, result, result.retry); - } - - logger.info(`Successfully verified proof from key in ${result.durationMs} ms`); - }; - - await this.runInDirectory(operation); + // Use msgpack API - NO FILE I/O! 
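+    // [Editor's illustrative sketch - not part of the original patch.] verifyWithKeyMsgpack splits the
+    // caller-supplied Proof buffer at numPublicInputs * 32 bytes before calling bb.js circuitVerify:
+    //
+    //   publicInputs = proof.buffer[0 .. numPublicInputs * 32)    // one 32-byte field per public input
+    //   proofFields  = proof.buffer[numPublicInputs * 32 .. end)  // remaining 32-byte proof fields
+    //
+    // Both arrays are passed entirely in memory; no temp files are written.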
+ return await this.verifyWithKeyMsgpack(proof, verificationKey, flavor); } /** @@ -693,29 +765,6 @@ export class BBNativeRollupProver implements ServerCircuitProver { return vk; } - private async readAvmProofAsFields( - proofFilename: string, - ): Promise> { - const rawProofBuffer = await fs.readFile(proofFilename); - const reader = BufferReader.asReader(rawProofBuffer); - const proofFields = reader.readArray(rawProofBuffer.length / Fr.SIZE_IN_BYTES, Fr); - - // We extend to a fixed-size padded proof as during development any new AVM circuit column changes the - // proof length and we do not have a mechanism to feedback a cpp constant to noir/TS. - // TODO(#13390): Revive a non-padded AVM proof - if (proofFields.length > AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED) { - throw new Error( - `Proof has ${proofFields.length} fields, expected no more than ${AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED}.`, - ); - } - const proofFieldsPadded = proofFields.concat( - Array(AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED - proofFields.length).fill(new Fr(0)), - ); - - const proof = new Proof(rawProofBuffer, /*numPublicInputs=*/ 0); - return new RecursiveProof(proofFieldsPadded, proof, true, AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED); - } - private runInDirectory(fn: (dir: string) => Promise) { return runInDirectory( this.config.bbWorkingDirectory, diff --git a/yarn-project/foundation/eslint.config.js b/yarn-project/foundation/eslint.config.js index 7f690f5830ee..858baa394b33 100644 --- a/yarn-project/foundation/eslint.config.js +++ b/yarn-project/foundation/eslint.config.js @@ -107,6 +107,7 @@ export default [ '@libp2p/bootstrap', // Seems like ignoring l1-artifacts in the eslint call messes up no-unresolved '@aztec/l1-artifacts', + '@aztec/bb.js', ], }, ], diff --git a/yarn-project/foundation/src/crypto/aes128/index.ts b/yarn-project/foundation/src/crypto/aes128/index.ts index cbdb78a63aef..e45d58dea764 100644 --- a/yarn-project/foundation/src/crypto/aes128/index.ts +++ b/yarn-project/foundation/src/crypto/aes128/index.ts @@ -1,4 +1,4 @@ -import { BarretenbergSync, RawBuffer } from '@aztec/bb.js'; +import { BarretenbergSync } from '@aztec/bb.js'; import { Buffer } from 'buffer'; @@ -22,10 +22,15 @@ export class Aes128 { paddingBuffer.fill(numPaddingBytes); const input = Buffer.concat([data, paddingBuffer]); - const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH); - return Buffer.from( - api.aesEncryptBufferCbc(new RawBuffer(input), new RawBuffer(iv), new RawBuffer(key), input.length), - ); + await BarretenbergSync.initSingleton(); + const api = BarretenbergSync.getSingleton(); + const response = api.aesEncrypt({ + plaintext: input, + iv, + key, + length: input.length, + }); + return Buffer.from(response.ciphertext); } /** @@ -37,11 +42,15 @@ export class Aes128 { * @returns Decrypted data. 
*/ public async decryptBufferCBCKeepPadding(data: Uint8Array, iv: Uint8Array, key: Uint8Array): Promise { - const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH); - const paddedBuffer = Buffer.from( - api.aesDecryptBufferCbc(new RawBuffer(data), new RawBuffer(iv), new RawBuffer(key), data.length), - ); - return paddedBuffer; + await BarretenbergSync.initSingleton(); + const api = BarretenbergSync.getSingleton(); + const response = api.aesDecrypt({ + ciphertext: data, + iv, + key, + length: data.length, + }); + return Buffer.from(response.plaintext); } /** diff --git a/yarn-project/foundation/src/crypto/ecdsa/index.ts b/yarn-project/foundation/src/crypto/ecdsa/index.ts index 6e6444ff4e12..3807ddf11aa4 100644 --- a/yarn-project/foundation/src/crypto/ecdsa/index.ts +++ b/yarn-project/foundation/src/crypto/ecdsa/index.ts @@ -1,7 +1,5 @@ import { BarretenbergSync } from '@aztec/bb.js'; -import { numToInt32BE } from '@aztec/foundation/serialize'; -import { concatenateUint8Arrays } from '../serialize.js'; import { EcdsaSignature } from './signature.js'; export * from './signature.js'; @@ -18,11 +16,13 @@ export class Ecdsa { * @returns A secp256k1 public key. */ public async computePublicKey(privateKey: Buffer): Promise { - const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH); - const [result] = api - .getWasm() - .callWasmExport(`ecdsa_${this.curve === 'secp256r1' ? 'r' : ''}_compute_public_key`, [privateKey], [64]); - return Buffer.from(result); + await BarretenbergSync.initSingleton(); + const api = BarretenbergSync.getSingleton(); + const response = + this.curve === 'secp256r1' + ? api.ecdsaSecp256r1ComputePublicKey({ privateKey }) + : api.ecdsaSecp256k1ComputePublicKey({ privateKey }); + return Buffer.concat([Buffer.from(response.publicKey.x), Buffer.from(response.publicKey.y)]); } /** @@ -32,16 +32,13 @@ export class Ecdsa { * @returns An ECDSA signature of the form (r, s, v). */ public async constructSignature(msg: Uint8Array, privateKey: Buffer) { - const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH); - const messageArray = concatenateUint8Arrays([numToInt32BE(msg.length), msg]); - const [r, s, v] = api - .getWasm() - .callWasmExport( - `ecdsa_${this.curve === 'secp256r1' ? 'r' : ''}_construct_signature_`, - [messageArray, privateKey], - [32, 32, 1], - ); - return new EcdsaSignature(Buffer.from(r), Buffer.from(s), Buffer.from(v)); + await BarretenbergSync.initSingleton(); + const api = BarretenbergSync.getSingleton(); + const response = + this.curve === 'secp256r1' + ? api.ecdsaSecp256r1ConstructSignature({ message: msg, privateKey }) + : api.ecdsaSecp256k1ConstructSignature({ message: msg, privateKey }); + return new EcdsaSignature(Buffer.from(response.r), Buffer.from(response.s), Buffer.from([response.v])); } /** @@ -51,16 +48,13 @@ export class Ecdsa { * @returns The secp256k1 public key of the signer. */ public async recoverPublicKey(msg: Uint8Array, sig: EcdsaSignature): Promise { - const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH); - const messageArray = concatenateUint8Arrays([numToInt32BE(msg.length), msg]); - const [result] = api - .getWasm() - .callWasmExport( - `ecdsa_${this.curve === 'secp256r1' ? 'r' : ''}_recover_public_key_from_signature_`, - [messageArray, sig.r, sig.s, sig.v], - [64], - ); - return Buffer.from(result); + await BarretenbergSync.initSingleton(); + const api = BarretenbergSync.getSingleton(); + const response = + this.curve === 'secp256r1' + ? 
api.ecdsaSecp256r1RecoverPublicKey({ message: msg, r: sig.r, s: sig.s, v: sig.v[0] }) + : api.ecdsaSecp256k1RecoverPublicKey({ message: msg, r: sig.r, s: sig.s, v: sig.v[0] }); + return Buffer.concat([Buffer.from(response.publicKey.x), Buffer.from(response.publicKey.y)]); } /** @@ -71,15 +65,24 @@ export class Ecdsa { * @returns True or false. */ public async verifySignature(msg: Uint8Array, pubKey: Buffer, sig: EcdsaSignature) { - const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH); - const messageArray = concatenateUint8Arrays([numToInt32BE(msg.length), msg]); - const [result] = api - .getWasm() - .callWasmExport( - `ecdsa_${this.curve === 'secp256r1' ? 'r' : ''}_verify_signature_`, - [messageArray, pubKey, sig.r, sig.s, sig.v], - [1], - ); - return result[0] === 1; + await BarretenbergSync.initSingleton(); + const api = BarretenbergSync.getSingleton(); + const response = + this.curve === 'secp256r1' + ? api.ecdsaSecp256r1VerifySignature({ + message: msg, + publicKey: { x: pubKey.subarray(0, 32), y: pubKey.subarray(32, 64) }, + r: sig.r, + s: sig.s, + v: sig.v[0], + }) + : api.ecdsaSecp256k1VerifySignature({ + message: msg, + publicKey: { x: pubKey.subarray(0, 32), y: pubKey.subarray(32, 64) }, + r: sig.r, + s: sig.s, + v: sig.v[0], + }); + return response.verified; } } diff --git a/yarn-project/foundation/src/crypto/grumpkin/index.ts b/yarn-project/foundation/src/crypto/grumpkin/index.ts index b1f1faa6f118..12222dc417bb 100644 --- a/yarn-project/foundation/src/crypto/grumpkin/index.ts +++ b/yarn-project/foundation/src/crypto/grumpkin/index.ts @@ -28,9 +28,13 @@ export class Grumpkin { * @returns Result of the multiplication. */ public async mul(point: Point, scalar: GrumpkinScalar): Promise { - const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH); - const [result] = api.getWasm().callWasmExport('ecc_grumpkin__mul', [point.toBuffer(), scalar.toBuffer()], [64]); - return Point.fromBuffer(Buffer.from(result)); + await BarretenbergSync.initSingleton(); + const api = BarretenbergSync.getSingleton(); + const response = api.grumpkinMul({ + point: { x: point.x.toBuffer(), y: point.y.toBuffer() }, + scalar: scalar.toBuffer(), + }); + return Point.fromBuffer(Buffer.concat([Buffer.from(response.point.x), Buffer.from(response.point.y)])); } /** @@ -40,9 +44,13 @@ export class Grumpkin { * @returns Result of the addition. */ public async add(a: Point, b: Point): Promise { - const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH); - const [result] = api.getWasm().callWasmExport('ecc_grumpkin__add', [a.toBuffer(), b.toBuffer()], [64]); - return Point.fromBuffer(Buffer.from(result)); + await BarretenbergSync.initSingleton(); + const api = BarretenbergSync.getSingleton(); + const response = api.grumpkinAdd({ + pointA: { x: a.x.toBuffer(), y: a.y.toBuffer() }, + pointB: { x: b.x.toBuffer(), y: b.y.toBuffer() }, + }); + return Point.fromBuffer(Buffer.concat([Buffer.from(response.point.x), Buffer.from(response.point.y)])); } /** @@ -52,24 +60,14 @@ export class Grumpkin { * @returns Points multiplied by the scalar. 
*/ public async batchMul(points: Point[], scalar: GrumpkinScalar) { - const concatenatedPoints: Buffer = Buffer.concat(points.map(point => point.toBuffer())); + await BarretenbergSync.initSingleton(); + const api = BarretenbergSync.getSingleton(); + const response = api.grumpkinBatchMul({ + points: points.map(p => ({ x: p.x.toBuffer(), y: p.y.toBuffer() })), + scalar: scalar.toBuffer(), + }); - const pointsByteLength = points.length * Point.SIZE_IN_BYTES; - - const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH); - const [result] = api - .getWasm() - .callWasmExport( - 'ecc_grumpkin__batch_mul', - [concatenatedPoints, scalar.toBuffer(), points.length], - [pointsByteLength], - ); - - const parsedResult: Point[] = []; - for (let i = 0; i < pointsByteLength; i += 64) { - parsedResult.push(Point.fromBuffer(Buffer.from(result.subarray(i, i + 64)))); - } - return parsedResult; + return response.points.map(p => Point.fromBuffer(Buffer.concat([Buffer.from(p.x), Buffer.from(p.y)]))); } /** @@ -77,9 +75,10 @@ export class Grumpkin { * @returns Random field element. */ public async getRandomFr(): Promise { - const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH); - const [result] = api.getWasm().callWasmExport('ecc_grumpkin__get_random_scalar_mod_circuit_modulus', [], [32]); - return Fr.fromBuffer(Buffer.from(result)); + await BarretenbergSync.initSingleton(); + const api = BarretenbergSync.getSingleton(); + const response = api.grumpkinGetRandomFr({ dummy: 0 }); + return Fr.fromBuffer(Buffer.from(response.value)); } /** @@ -88,10 +87,9 @@ export class Grumpkin { * @returns Buffer representation of the field element. */ public async reduce512BufferToFr(uint512Buf: Buffer): Promise { - const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH); - const [result] = api - .getWasm() - .callWasmExport('ecc_grumpkin__reduce512_buffer_mod_circuit_modulus', [uint512Buf], [32]); - return Fr.fromBuffer(Buffer.from(result)); + await BarretenbergSync.initSingleton(); + const api = BarretenbergSync.getSingleton(); + const response = api.grumpkinReduce512({ input: uint512Buf }); + return Fr.fromBuffer(Buffer.from(response.value)); } } diff --git a/yarn-project/foundation/src/crypto/keys/index.ts b/yarn-project/foundation/src/crypto/keys/index.ts index 821e3967557a..5e1429333109 100644 --- a/yarn-project/foundation/src/crypto/keys/index.ts +++ b/yarn-project/foundation/src/crypto/keys/index.ts @@ -1,10 +1,10 @@ -import { BarretenbergSync, RawBuffer } from '@aztec/bb.js'; +import { BarretenbergSync } from '@aztec/bb.js'; import { Fr } from '../../fields/fields.js'; export async function vkAsFieldsMegaHonk(input: Buffer): Promise { - const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH); - const result = api.acirVkAsFieldsMegaHonk(new RawBuffer(input)); - - return result.map(bbFr => Fr.fromBuffer(Buffer.from(bbFr.toBuffer()))); // TODO(#4189): remove this conversion + await BarretenbergSync.initSingleton(); + const api = BarretenbergSync.getSingleton(); + const response = api.megaVkAsFields({ verificationKey: input }); + return response.fields.map(field => Fr.fromBuffer(Buffer.from(field))); } diff --git a/yarn-project/foundation/src/crypto/pedersen/index.test.ts b/yarn-project/foundation/src/crypto/pedersen/index.test.ts index b2e0e6273e02..326e2d5603e0 100644 --- a/yarn-project/foundation/src/crypto/pedersen/index.test.ts +++ b/yarn-project/foundation/src/crypto/pedersen/index.test.ts @@ -1,9 +1,12 @@ +import { BarretenbergSync } from 
'@aztec/bb.js'; + import { toBufferBE } from '../../bigint-buffer/index.js'; import { setupCustomSnapshotSerializers } from '../../testing/index.js'; import { pedersenCommit, pedersenHash, pedersenHashBuffer } from './index.js'; describe('pedersen', () => { - beforeAll(() => { + beforeAll(async () => { + await BarretenbergSync.initSingleton({ threads: 1 }); setupCustomSnapshotSerializers(expect); }); diff --git a/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts b/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts index 41f4e1776af9..9c063b0413b9 100644 --- a/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts +++ b/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts @@ -1,4 +1,4 @@ -import { BarretenbergSync, Fr as FrBarretenberg } from '@aztec/bb.js'; +import { BarretenbergSync } from '@aztec/bb.js'; import { Fr } from '../../fields/fields.js'; import { type Fieldable, serializeToFields } from '../../serialize/serialize.js'; @@ -12,14 +12,13 @@ export async function pedersenCommit(input: Buffer[], offset = 0) { throw new Error('All Pedersen Commit input buffers must be <= 32 bytes.'); } input = input.map(i => (i.length < 32 ? Buffer.concat([Buffer.alloc(32 - i.length, 0), i]) : i)); - const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH); - const point = api.pedersenCommit( - input.map(i => new FrBarretenberg(i)), - offset, - ); - // toBuffer returns Uint8Arrays (browser/worker-boundary friendly). - // TODO: rename toTypedArray()? - return [Buffer.from(point.x.toBuffer()), Buffer.from(point.y.toBuffer())]; + await BarretenbergSync.initSingleton(); + const api = BarretenbergSync.getSingleton(); + const response = api.pedersenCommit({ + inputs: input, + hashIndex: offset, + }); + return [Buffer.from(response.point.x), Buffer.from(response.point.y)]; } /** @@ -30,19 +29,24 @@ export async function pedersenCommit(input: Buffer[], offset = 0) { */ export async function pedersenHash(input: Fieldable[], index = 0): Promise { const inputFields = serializeToFields(input); - const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH); - const hash = api.pedersenHash( - inputFields.map(i => new FrBarretenberg(i.toBuffer())), // TODO(#4189): remove this stupid conversion - index, - ); - return Fr.fromBuffer(Buffer.from(hash.toBuffer())); + await BarretenbergSync.initSingleton(); + const api = BarretenbergSync.getSingleton(); + const response = api.pedersenHash({ + inputs: inputFields.map(i => i.toBuffer()), + hashIndex: index, + }); + return Fr.fromBuffer(Buffer.from(response.hash)); } /** * Create a pedersen hash from an arbitrary length buffer. 
 */
 export async function pedersenHashBuffer(input: Buffer, index = 0) {
-  const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH);
-  const result = api.pedersenHashBuffer(input, index);
-  return Buffer.from(result.toBuffer());
+  await BarretenbergSync.initSingleton();
+  const api = BarretenbergSync.getSingleton();
+  const response = api.pedersenHashBuffer({
+    input,
+    hashIndex: index,
+  });
+  return Buffer.from(response.hash);
 }
diff --git a/yarn-project/foundation/src/crypto/poseidon/index.test.ts b/yarn-project/foundation/src/crypto/poseidon/index.test.ts
index 7adf60921580..c3e6c9c5defb 100644
--- a/yarn-project/foundation/src/crypto/poseidon/index.test.ts
+++ b/yarn-project/foundation/src/crypto/poseidon/index.test.ts
@@ -1,7 +1,13 @@
+import { BarretenbergSync } from '@aztec/bb.js';
+
 import { Fr } from '../../fields/fields.js';
 import { poseidon2Permutation } from './index.js';

 describe('poseidon2Permutation', () => {
+  beforeAll(async () => {
+    await BarretenbergSync.initSingleton({ threads: 1 });
+  });
+
   it('test vectors from cpp should match', async () => {
     const init = [0, 1, 2, 3];
     await expect(poseidon2Permutation(init)).resolves.toEqual([
diff --git a/yarn-project/foundation/src/crypto/poseidon/index.ts b/yarn-project/foundation/src/crypto/poseidon/index.ts
index fce2b0c606e4..d47105af5a74 100644
--- a/yarn-project/foundation/src/crypto/poseidon/index.ts
+++ b/yarn-project/foundation/src/crypto/poseidon/index.ts
@@ -1,4 +1,4 @@
-import { BarretenbergSync, Fr as FrBarretenberg } from '@aztec/bb.js';
+import { BarretenbergSync } from '@aztec/bb.js';

 import { Fr } from '../../fields/fields.js';
 import { type Fieldable, serializeToFields } from '../../serialize/serialize.js';
@@ -10,11 +10,12 @@ import { type Fieldable, serializeToFields } from '../../serialize/serialize.js'
  */
 export async function poseidon2Hash(input: Fieldable[]): Promise<Fr> {
   const inputFields = serializeToFields(input);
-  const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH);
-  const hash = api.poseidon2Hash(
-    inputFields.map(i => new FrBarretenberg(i.toBuffer())), // TODO(#4189): remove this stupid conversion
-  );
-  return Fr.fromBuffer(Buffer.from(hash.toBuffer()));
+  await BarretenbergSync.initSingleton();
+  const api = BarretenbergSync.getSingleton();
+  const response = api.poseidon2Hash({
+    inputs: inputFields.map(i => i.toBuffer()),
+  });
+  return Fr.fromBuffer(Buffer.from(response.hash));
 }

 /**
@@ -26,19 +27,22 @@ export async function poseidon2HashWithSeparator(input: Fieldable[], separator: number): Promise<Fr> {
   const inputFields = serializeToFields(input);
   inputFields.unshift(new Fr(separator));
-  const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH);
-
-  const hash = api.poseidon2Hash(
-    inputFields.map(i => new FrBarretenberg(i.toBuffer())), // TODO(#4189): remove this stupid conversion
-  );
-  return Fr.fromBuffer(Buffer.from(hash.toBuffer()));
+  await BarretenbergSync.initSingleton();
+  const api = BarretenbergSync.getSingleton();
+  const response = api.poseidon2Hash({
+    inputs: inputFields.map(i => i.toBuffer()),
+  });
+  return Fr.fromBuffer(Buffer.from(response.hash));
 }

 export async function poseidon2HashAccumulate(input: Fieldable[]): Promise<Fr> {
   const inputFields = serializeToFields(input);
-  const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH);
-  const result = api.poseidon2HashAccumulate(inputFields.map(i => new FrBarretenberg(i.toBuffer())));
-  return Fr.fromBuffer(Buffer.from(result.toBuffer()));
+  await BarretenbergSync.initSingleton();
+  const api = BarretenbergSync.getSingleton();
+  const response = api.poseidon2HashAccumulate({
+    inputs: inputFields.map(i => i.toBuffer()),
+  });
+  return Fr.fromBuffer(Buffer.from(response.hash));
 }

 /**
@@ -50,11 +54,14 @@ export async function poseidon2Permutation(input: Fieldable[]): Promise<Fr[]> {
   const inputFields = serializeToFields(input);
   // We'd like this assertion but it's not possible to use it in the browser.
   // assert(input.length === 4, 'Input state must be of size 4');
-  const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH);
-  const res = api.poseidon2Permutation(inputFields.map(i => new FrBarretenberg(i.toBuffer())));
+  await BarretenbergSync.initSingleton();
+  const api = BarretenbergSync.getSingleton();
+  const response = api.poseidon2Permutation({
+    inputs: inputFields.map(i => i.toBuffer()),
+  });
   // We'd like this assertion but it's not possible to use it in the browser.
-  // assert(res.length === 4, 'Output state must be of size 4');
-  return res.map(o => Fr.fromBuffer(Buffer.from(o.toBuffer())));
+  // assert(response.outputs.length === 4, 'Output state must be of size 4');
+  return response.outputs.map(o => Fr.fromBuffer(Buffer.from(o)));
 }

 export async function poseidon2HashBytes(input: Buffer): Promise<Fr> {
@@ -68,10 +75,11 @@ export async function poseidon2HashBytes(input: Buffer): Promise<Fr> {
     inputFields.push(Fr.fromBuffer(fieldBytes));
   }

-  const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH);
-  const res = api.poseidon2Hash(
-    inputFields.map(i => new FrBarretenberg(i.toBuffer())), // TODO(#4189): remove this stupid conversion
-  );
+  await BarretenbergSync.initSingleton();
+  const api = BarretenbergSync.getSingleton();
+  const response = api.poseidon2Hash({
+    inputs: inputFields.map(i => i.toBuffer()),
+  });

-  return Fr.fromBuffer(Buffer.from(res.toBuffer()));
+  return Fr.fromBuffer(Buffer.from(response.hash));
 }
diff --git a/yarn-project/foundation/src/crypto/schnorr/index.ts b/yarn-project/foundation/src/crypto/schnorr/index.ts
index ab36c337a663..9b6fe02a7e2c 100644
--- a/yarn-project/foundation/src/crypto/schnorr/index.ts
+++ b/yarn-project/foundation/src/crypto/schnorr/index.ts
@@ -1,8 +1,6 @@
 import { BarretenbergSync } from '@aztec/bb.js';
 import { type GrumpkinScalar, Point } from '@aztec/foundation/fields';
-import { numToInt32BE } from '@aztec/foundation/serialize';

-import { concatenateUint8Arrays } from '../serialize.js';
 import { SchnorrSignature } from './signature.js';

 export * from './signature.js';
@@ -17,9 +15,10 @@ export class Schnorr {
    * @returns A grumpkin public key.
    */
  public async computePublicKey(privateKey: GrumpkinScalar): Promise<Point> {
-    const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH);
-    const [result] = api.getWasm().callWasmExport('schnorr_compute_public_key', [privateKey.toBuffer()], [64]);
-    return Point.fromBuffer(Buffer.from(result));
+    await BarretenbergSync.initSingleton();
+    const api = BarretenbergSync.getSingleton();
+    const response = api.schnorrComputePublicKey({ privateKey: privateKey.toBuffer() });
+    return Point.fromBuffer(Buffer.concat([Buffer.from(response.publicKey.x), Buffer.from(response.publicKey.y)]));
   }

   /**
@@ -29,12 +28,13 @@ export class Schnorr {
    * @returns A Schnorr signature of the form (s, e).
   */
  public async constructSignature(msg: Uint8Array, privateKey: GrumpkinScalar) {
-    const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH);
-    const messageArray = concatenateUint8Arrays([numToInt32BE(msg.length), msg]);
-    const [s, e] = api
-      .getWasm()
-      .callWasmExport('schnorr_construct_signature', [messageArray, privateKey.toBuffer()], [32, 32]);
-    return new SchnorrSignature(Buffer.from([...s, ...e]));
+    await BarretenbergSync.initSingleton();
+    const api = BarretenbergSync.getSingleton();
+    const response = api.schnorrConstructSignature({
+      message: msg,
+      privateKey: privateKey.toBuffer(),
+    });
+    return new SchnorrSignature(Buffer.from([...response.s, ...response.e]));
   }

   /**
@@ -45,11 +45,14 @@ export class Schnorr {
    * @returns True or false.
    */
  public async verifySignature(msg: Uint8Array, pubKey: Point, sig: SchnorrSignature) {
-    const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH);
-    const messageArray = concatenateUint8Arrays([numToInt32BE(msg.length), msg]);
-    const [result] = api
-      .getWasm()
-      .callWasmExport('schnorr_verify_signature', [messageArray, pubKey.toBuffer(), sig.s, sig.e], [1]);
-    return result[0] === 1;
+    await BarretenbergSync.initSingleton();
+    const api = BarretenbergSync.getSingleton();
+    const response = api.schnorrVerifySignature({
+      message: msg,
+      publicKey: { x: pubKey.x.toBuffer(), y: pubKey.y.toBuffer() },
+      s: sig.s,
+      e: sig.e,
+    });
+    return response.verified;
   }
 }
diff --git a/yarn-project/foundation/src/crypto/secp256k1/index.ts b/yarn-project/foundation/src/crypto/secp256k1/index.ts
index 1b3e879670c7..23ad2e61b6e6 100644
--- a/yarn-project/foundation/src/crypto/secp256k1/index.ts
+++ b/yarn-project/foundation/src/crypto/secp256k1/index.ts
@@ -27,9 +27,13 @@
    * @returns Result of the multiplication.
    */
  public async mul(point: Uint8Array, scalar: Uint8Array) {
-    const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH);
-    const [result] = api.getWasm().callWasmExport('ecc_secp256k1__mul', [point, scalar], [64]);
-    return Buffer.from(result);
+    await BarretenbergSync.initSingleton();
+    const api = BarretenbergSync.getSingleton();
+    const response = api.secp256k1Mul({
+      point: { x: point.subarray(0, 32), y: point.subarray(32, 64) },
+      scalar,
+    });
+    return Buffer.concat([Buffer.from(response.point.x), Buffer.from(response.point.y)]);
   }

   /**
@@ -37,9 +41,10 @@
    * @returns Random field element.
    */
  public async getRandomFr() {
-    const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH);
-    const [result] = api.getWasm().callWasmExport('ecc_secp256k1__get_random_scalar_mod_circuit_modulus', [], [32]);
-    return Buffer.from(result);
+    await BarretenbergSync.initSingleton();
+    const api = BarretenbergSync.getSingleton();
+    const response = api.secp256k1GetRandomFr({ dummy: 0 });
+    return Buffer.from(response.value);
   }

   /**
@@ -48,10 +53,9 @@
    * @returns Buffer representation of the field element.
   */
  public async reduce512BufferToFr(uint512Buf: Buffer) {
-    const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH);
-    const [result] = api
-      .getWasm()
-      .callWasmExport('ecc_secp256k1__reduce512_buffer_mod_circuit_modulus', [uint512Buf], [32]);
-    return Buffer.from(result);
+    await BarretenbergSync.initSingleton();
+    const api = BarretenbergSync.getSingleton();
+    const response = api.secp256k1Reduce512({ input: uint512Buf });
+    return Buffer.from(response.value);
   }
 }
diff --git a/yarn-project/foundation/src/crypto/sync/index.ts b/yarn-project/foundation/src/crypto/sync/index.ts
index a14fe1e6edb9..65a9546b4089 100644
--- a/yarn-project/foundation/src/crypto/sync/index.ts
+++ b/yarn-project/foundation/src/crypto/sync/index.ts
@@ -3,4 +3,4 @@ import { BarretenbergSync } from '@aztec/bb.js';
 export * from './poseidon/index.js';
 export * from './pedersen/index.js';

-await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH);
+await BarretenbergSync.initSingleton();
diff --git a/yarn-project/foundation/src/crypto/sync/pedersen/index.test.ts b/yarn-project/foundation/src/crypto/sync/pedersen/index.test.ts
index 0a498ccea855..80864fc874fb 100644
--- a/yarn-project/foundation/src/crypto/sync/pedersen/index.test.ts
+++ b/yarn-project/foundation/src/crypto/sync/pedersen/index.test.ts
@@ -6,7 +6,7 @@ import { pedersenCommit, pedersenHash, pedersenHashBuffer } from './index.js';

 describe('pedersen', () => {
   beforeAll(async () => {
-    await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH);
+    await BarretenbergSync.initSingleton();
     setupCustomSnapshotSerializers(expect);
   });
diff --git a/yarn-project/foundation/src/crypto/sync/pedersen/index.ts b/yarn-project/foundation/src/crypto/sync/pedersen/index.ts
index 8fb73c59ca76..fcc921a96277 100644
--- a/yarn-project/foundation/src/crypto/sync/pedersen/index.ts
+++ b/yarn-project/foundation/src/crypto/sync/pedersen/index.ts
@@ -1,4 +1,4 @@
-import { BarretenbergSync, Fr as FrBarretenberg } from '@aztec/bb.js';
+import { BarretenbergSync } from '@aztec/bb.js';

 import { Fr } from '../../../fields/fields.js';
 import { type Fieldable, serializeToFields } from '../../../serialize/serialize.js';
@@ -12,13 +12,11 @@ export function pedersenCommit(input: Buffer[], offset = 0) {
     throw new Error('All Pedersen Commit input buffers must be <= 32 bytes.');
   }
   input = input.map(i => (i.length < 32 ? Buffer.concat([Buffer.alloc(32 - i.length, 0), i]) : i));
-  const point = BarretenbergSync.getSingleton().pedersenCommit(
-    input.map(i => new FrBarretenberg(i)),
-    offset,
-  );
-  // toBuffer returns Uint8Arrays (browser/worker-boundary friendly).
-  // TODO: rename toTypedArray()?
-  return [Buffer.from(point.x.toBuffer()), Buffer.from(point.y.toBuffer())];
+  const response = BarretenbergSync.getSingleton().pedersenCommit({
+    inputs: input,
+    hashIndex: offset,
+  });
+  return [Buffer.from(response.point.x), Buffer.from(response.point.y)];
 }

 /**
@@ -29,17 +27,20 @@ export function pedersenCommit(input: Buffer[], offset = 0) {
  */
 export function pedersenHash(input: Fieldable[], index = 0): Fr {
   const inputFields = serializeToFields(input);
-  const hash = BarretenbergSync.getSingleton().pedersenHash(
-    inputFields.map(i => new FrBarretenberg(i.toBuffer())), // TODO(#4189): remove this stupid conversion
-    index,
-  );
-  return Fr.fromBuffer(Buffer.from(hash.toBuffer()));
+  const response = BarretenbergSync.getSingleton().pedersenHash({
+    inputs: inputFields.map(i => i.toBuffer()),
+    hashIndex: index,
+  });
+  return Fr.fromBuffer(Buffer.from(response.hash));
 }

 /**
  * Create a pedersen hash from an arbitrary length buffer.
  */
 export function pedersenHashBuffer(input: Buffer, index = 0) {
-  const result = BarretenbergSync.getSingleton().pedersenHashBuffer(input, index);
-  return Buffer.from(result.toBuffer());
+  const response = BarretenbergSync.getSingleton().pedersenHashBuffer({
+    input,
+    hashIndex: index,
+  });
+  return Buffer.from(response.hash);
 }
diff --git a/yarn-project/foundation/src/crypto/sync/poseidon/index.test.ts b/yarn-project/foundation/src/crypto/sync/poseidon/index.test.ts
index da3162c465d4..30deea1116a2 100644
--- a/yarn-project/foundation/src/crypto/sync/poseidon/index.test.ts
+++ b/yarn-project/foundation/src/crypto/sync/poseidon/index.test.ts
@@ -5,8 +5,9 @@ import { poseidon2Permutation } from './index.js';

 describe('poseidon2Permutation', () => {
   beforeAll(async () => {
-    await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH);
+    await BarretenbergSync.initSingleton();
   });
+
   it('test vectors from cpp should match', () => {
     const init = [0, 1, 2, 3];
     expect(poseidon2Permutation(init)).toEqual([
diff --git a/yarn-project/foundation/src/crypto/sync/poseidon/index.ts b/yarn-project/foundation/src/crypto/sync/poseidon/index.ts
index 86040b130d0d..8a825c188456 100644
--- a/yarn-project/foundation/src/crypto/sync/poseidon/index.ts
+++ b/yarn-project/foundation/src/crypto/sync/poseidon/index.ts
@@ -1,4 +1,4 @@
-import { BarretenbergSync, Fr as FrBarretenberg } from '@aztec/bb.js';
+import { BarretenbergSync } from '@aztec/bb.js';

 import { Fr } from '../../../fields/fields.js';
 import { type Fieldable, serializeToFields } from '../../../serialize/serialize.js';
@@ -10,10 +10,11 @@ import { type Fieldable, serializeToFields } from '../../../serialize/serialize.
 */
 export function poseidon2Hash(input: Fieldable[]): Fr {
   const inputFields = serializeToFields(input);
-  const hash = BarretenbergSync.getSingleton().poseidon2Hash(
-    inputFields.map(i => new FrBarretenberg(i.toBuffer())), // TODO(#4189): remove this stupid conversion
-  );
-  return Fr.fromBuffer(Buffer.from(hash.toBuffer()));
+  const api = BarretenbergSync.getSingleton();
+  const response = api.poseidon2Hash({
+    inputs: inputFields.map(i => i.toBuffer()),
+  });
+  return Fr.fromBuffer(Buffer.from(response.hash));
 }

 /**
@@ -26,18 +27,20 @@ export function poseidon2HashWithSeparator(input: Fieldable[], separator: number
   const inputFields = serializeToFields(input);
   inputFields.unshift(new Fr(separator));

-  const hash = BarretenbergSync.getSingleton().poseidon2Hash(
-    inputFields.map(i => new FrBarretenberg(i.toBuffer())), // TODO(#4189): remove this stupid conversion
-  );
-  return Fr.fromBuffer(Buffer.from(hash.toBuffer()));
+  const api = BarretenbergSync.getSingleton();
+  const response = api.poseidon2Hash({
+    inputs: inputFields.map(i => i.toBuffer()),
+  });
+  return Fr.fromBuffer(Buffer.from(response.hash));
 }

 export function poseidon2HashAccumulate(input: Fieldable[]): Fr {
   const inputFields = serializeToFields(input);
-  const result = BarretenbergSync.getSingleton().poseidon2HashAccumulate(
-    inputFields.map(i => new FrBarretenberg(i.toBuffer())),
-  );
-  return Fr.fromBuffer(Buffer.from(result.toBuffer()));
+  const api = BarretenbergSync.getSingleton();
+  const response = api.poseidon2HashAccumulate({
+    inputs: inputFields.map(i => i.toBuffer()),
+  });
+  return Fr.fromBuffer(Buffer.from(response.hash));
 }

 /**
@@ -49,12 +52,13 @@ export function poseidon2Permutation(input: Fieldable[]): Fr[] {
   const inputFields = serializeToFields(input);
   // We'd like this assertion but it's not possible to use it in the browser.
   // assert(input.length === 4, 'Input state must be of size 4');
-  const res = BarretenbergSync.getSingleton().poseidon2Permutation(
-    inputFields.map(i => new FrBarretenberg(i.toBuffer())),
-  );
+  const api = BarretenbergSync.getSingleton();
+  const response = api.poseidon2Permutation({
+    inputs: inputFields.map(i => i.toBuffer()),
+  });
   // We'd like this assertion but it's not possible to use it in the browser.
-  // assert(res.length === 4, 'Output state must be of size 4');
-  return res.map(o => Fr.fromBuffer(Buffer.from(o.toBuffer())));
+  // assert(response.outputs.length === 4, 'Output state must be of size 4');
+  return response.outputs.map(o => Fr.fromBuffer(Buffer.from(o)));
 }

 export function poseidon2HashBytes(input: Buffer): Fr {
@@ -68,9 +72,10 @@ export function poseidon2HashBytes(input: Buffer): Fr {
     inputFields.push(Fr.fromBuffer(fieldBytes));
   }

-  const res = BarretenbergSync.getSingleton().poseidon2Hash(
-    inputFields.map(i => new FrBarretenberg(i.toBuffer())), // TODO(#4189): remove this stupid conversion
-  );
+  const api = BarretenbergSync.getSingleton();
+  const response = api.poseidon2Hash({
+    inputs: inputFields.map(i => i.toBuffer()),
+  });

-  return Fr.fromBuffer(Buffer.from(res.toBuffer()));
+  return Fr.fromBuffer(Buffer.from(response.hash));
 }
diff --git a/yarn-project/foundation/src/fields/fields.ts b/yarn-project/foundation/src/fields/fields.ts
index f1dbe526cf4c..f18866899d77 100644
--- a/yarn-project/foundation/src/fields/fields.ts
+++ b/yarn-project/foundation/src/fields/fields.ts
@@ -321,15 +321,14 @@ export class Fr extends BaseField {
    * @returns A square root of the field element (null if it does not exist).
   */
  async sqrt(): Promise<Fr | null> {
-    const api = await BarretenbergSync.initSingleton(process.env.BB_WASM_PATH);
-    const wasm = api.getWasm();
-    const [buf] = wasm.callWasmExport('bn254_fr_sqrt', [this.toBuffer()], [Fr.SIZE_IN_BYTES + 1]);
-    const isSqrt = buf[0] === 1;
-    if (!isSqrt) {
+    await BarretenbergSync.initSingleton();
+    const api = BarretenbergSync.getSingleton();
+    const response = api.bn254FrSqrt({ input: this.toBuffer() });
+    if (!response.isSquareRoot) {
       // Field element is not a quadratic residue mod p so it has no square root.
       return null;
     }
-    return new Fr(Buffer.from(buf.slice(1)));
+    return Fr.fromBuffer(Buffer.from(response.value));
   }

   toJSON() {