From 10052b178af7b9b29e2f27e7406e6e95bffa7a53 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Thu, 19 Feb 2026 11:44:55 +0000 Subject: [PATCH 1/4] wip --- .../test/benchmarking_contract/src/main.nr | 8 + spartan/environments/prove-n-tps-fake.env | 19 +- .../validator-resources-prod-hi-tps.yaml | 23 ++ .../prover/client/bb_private_kernel_prover.ts | 2 +- .../src/spartan/block_capacity.test.ts | 371 ++++++++++++++++++ .../end-to-end/src/spartan/utils/config.ts | 1 + .../src/test-wallet/wallet_worker_script.ts | 68 ++-- .../src/test-wallet/worker_wallet.ts | 54 ++- .../src/transport/transport_client.ts | 4 +- 9 files changed, 505 insertions(+), 45 deletions(-) create mode 100644 spartan/terraform/deploy-aztec-infra/values/validator-resources-prod-hi-tps.yaml create mode 100644 yarn-project/end-to-end/src/spartan/block_capacity.test.ts diff --git a/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr index 24528824843c..0310e2752b1c 100644 --- a/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr @@ -61,4 +61,12 @@ pub contract Benchmarking { fn sha256_hash_1024(data: [u8; 1024]) -> [u8; 32] { sha256::sha256_var(data, data.len()) } + + // Lightest possible private transaction: empty app circuit, no state changes, no public calls. + #[external("private")] + fn noop() {} + + // Lightest possible private transaction: empty app circuit, no state changes, no public calls. 
+ #[external("public")] + fn noop_pub() {} } diff --git a/spartan/environments/prove-n-tps-fake.env b/spartan/environments/prove-n-tps-fake.env index bed763852285..80cb90944ece 100644 --- a/spartan/environments/prove-n-tps-fake.env +++ b/spartan/environments/prove-n-tps-fake.env @@ -2,9 +2,9 @@ NAMESPACE=${NAMESPACE:-prove-n-tps-fake} CLUSTER=aztec-gke-private GCP_REGION=us-west1-a -AZTEC_EPOCH_DURATION=32 +AZTEC_EPOCH_DURATION=8 AZTEC_SLOT_DURATION=72 -AZTEC_PROOF_SUBMISSION_EPOCHS=1 +AZTEC_PROOF_SUBMISSION_EPOCHS=4 AZTEC_LAG_IN_EPOCHS_FOR_VALIDATOR_SET=1 AZTEC_LAG_IN_EPOCHS_FOR_RANDAO=1 @@ -20,8 +20,8 @@ FUNDING_PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf OTEL_COLLECTOR_ENDPOINT=REPLACE_WITH_GCP_SECRET -VALIDATOR_REPLICAS=4 -VALIDATORS_PER_NODE=12 +VALIDATOR_REPLICAS=1 +VALIDATORS_PER_NODE=48 PUBLISHERS_PER_VALIDATOR_KEY=1 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 @@ -31,20 +31,19 @@ RPC_REPLICAS=1 RPC_INGRESS_ENABLED=false PROVER_REPLICAS=200 -PROVER_RESOURCE_PROFILE="hi-tps" +PROVER_RESOURCE_PROFILE="dev" PROVER_PUBLISHER_MNEMONIC_START_INDEX=8000 PROVER_AGENT_POLL_INTERVAL_MS=10000 PUBLISHERS_PER_PROVER=1 PROVER_TEST_DELAY_TYPE=realistic -PROVER_TEST_VERIFICATION_DELAY_MS=250 +DEBUG_FORCE_TX_PROOF_VERIFICATION=true -SEQ_MAX_TX_PER_BLOCK=80 +SEQ_MAX_TX_PER_BLOCK=72000 # 1000 tps SEQ_MIN_TX_PER_BLOCK=0 +SEQ_ENFORCE_TIME_TABLE=true P2P_MAX_TX_POOL_SIZE=1000000000 DEBUG_P2P_INSTRUMENT_MESSAGES=true -# Reduce the amount of metrics produced by prover agents and full nodes -PROVER_AGENT_INCLUDE_METRICS="aztec.circuit" -LOG_LEVEL=info +LOG_LEVEL="debug; info: json-rpc, simulator" diff --git a/spartan/terraform/deploy-aztec-infra/values/validator-resources-prod-hi-tps.yaml b/spartan/terraform/deploy-aztec-infra/values/validator-resources-prod-hi-tps.yaml new file mode 100644 index 000000000000..ea5ef92f5b67 --- /dev/null +++ b/spartan/terraform/deploy-aztec-infra/values/validator-resources-prod-hi-tps.yaml @@ -0,0 +1,23 @@ +validator: + 
nodeSelector: + local-ssd: "false" + node-type: "network" + cores: "8" + hi-mem: "true" + node: + resources: + requests: + cpu: "7.5" + memory: "55Gi" + + nodeJsOptions: + - "--max-old-space-size=61440" + statefulSet: + volumeClaimTemplates: + - metadata: + name: data + spec: + accessModes: [ReadWriteOnce] + resources: + requests: + storage: 16Gi diff --git a/yarn-project/bb-prover/src/prover/client/bb_private_kernel_prover.ts b/yarn-project/bb-prover/src/prover/client/bb_private_kernel_prover.ts index 049815bd6567..70f691a6508c 100644 --- a/yarn-project/bb-prover/src/prover/client/bb_private_kernel_prover.ts +++ b/yarn-project/bb-prover/src/prover/client/bb_private_kernel_prover.ts @@ -278,7 +278,7 @@ export abstract class BBPrivateKernelProver implements PrivateKernelProver { this.log.info(`Generating ClientIVC proof...`); const barretenberg = await Barretenberg.initSingleton({ ...this.options, - logger: this.options.logger?.[(process.env.LOG_LEVEL as LogLevel) || 'verbose'], + logger: this.options.logger?.verbose, }); const backend = new AztecClientBackend( executionSteps.map(step => ungzip(step.bytecode)), diff --git a/yarn-project/end-to-end/src/spartan/block_capacity.test.ts b/yarn-project/end-to-end/src/spartan/block_capacity.test.ts new file mode 100644 index 000000000000..5f661c524cba --- /dev/null +++ b/yarn-project/end-to-end/src/spartan/block_capacity.test.ts @@ -0,0 +1,371 @@ +import { SchnorrAccountContract } from '@aztec/accounts/schnorr'; +import { AztecAddress } from '@aztec/aztec.js/addresses'; +import { type ContractFunctionInteraction, NO_WAIT, toSendOptions } from '@aztec/aztec.js/contracts'; +import { SponsoredFeePaymentMethod } from '@aztec/aztec.js/fee'; +import { type AztecNode, createAztecNodeClient, waitForTx } from '@aztec/aztec.js/node'; +import { AccountManager } from '@aztec/aztec.js/wallet'; +import { asyncPool } from '@aztec/foundation/async-pool'; +import { BlockNumber } from '@aztec/foundation/branded-types'; +import { Fr } from 
'@aztec/foundation/curves/bn254'; +import { createLogger } from '@aztec/foundation/log'; +import { retryUntil } from '@aztec/foundation/retry'; +import { TokenContract } from '@aztec/noir-contracts.js/Token'; +import { BenchmarkingContract } from '@aztec/noir-test-contracts.js/Benchmarking'; +import { GasFees } from '@aztec/stdlib/gas'; +import { deriveSigningKey } from '@aztec/stdlib/keys'; +import { Tx } from '@aztec/stdlib/tx'; + +import { jest } from '@jest/globals'; + +import { getSponsoredFPCAddress, registerSponsoredFPC } from '../fixtures/utils.js'; +import type { WorkerWallet } from '../test-wallet/worker_wallet.js'; +import { type WorkerWalletWrapper, createWorkerWalletClient } from './setup_test_wallets.js'; +import { getExternalIP, getSequencersConfig, setupEnvironment, updateSequencersConfig } from './utils.js'; + +const config = setupEnvironment(process.env); + +const NUM_WALLETS = config.REAL_VERIFIER || config.DEBUG_FORCE_TX_PROOF_VERIFICATION ? 10 : 1; +const TX_COUNT = parseInt(process.env.TX_COUNT ?? 
'100', 10); + +describe('block capacity benchmark', () => { + jest.setTimeout(60 * 60 * 1000); // 60 minutes + + const logger = createLogger('e2e:spartan-test:block-capacity'); + + let testWallets: WorkerWalletWrapper[]; + let wallets: WorkerWallet[]; + let accountAddresses: AztecAddress[]; + let aztecNode: AztecNode; + let benchmarkContract: BenchmarkingContract; + let tokenContract: TokenContract; + let originalSequencerConfig: Awaited> | undefined; + + beforeAll(async () => { + logger.info('Setting up block capacity benchmark', { + txCount: TX_COUNT, + numWallets: NUM_WALLETS, + realVerifier: config.REAL_VERIFIER, + namespace: config.NAMESPACE, + }); + + await updateSequencersConfig(config, { minTxsPerBlock: 0 }); + + const rpcIP = await getExternalIP(config.NAMESPACE, 'rpc-aztec-node'); + const rpcUrl = `http://${rpcIP}:8080`; + aztecNode = createAztecNodeClient(rpcUrl); + + // Wait for node to be ready + await retryUntil(async () => await aztecNode.isReady(), 'node ready', 120, 1); + logger.info('Node is ready'); + + // Save original sequencer config for restoration + originalSequencerConfig = await getSequencersConfig(config); + logger.info('Saved original sequencer config', { + minTxsPerBlock: originalSequencerConfig[0]?.minTxsPerBlock, + }); + + // Create WorkerWallets in parallel + logger.info(`Creating ${NUM_WALLETS} worker wallet(s)...`); + testWallets = await Promise.all( + Array.from({ length: NUM_WALLETS }, (_, i) => { + logger.info(`Creating wallet ${i + 1}/${NUM_WALLETS}`); + return createWorkerWalletClient( + rpcUrl, + config.REAL_VERIFIER || config.DEBUG_FORCE_TX_PROOF_VERIFICATION, + logger, + ); + }), + ); + wallets = testWallets.map(tw => tw.wallet); + + // Register FPC and create/deploy accounts in parallel + const fpcAddress = await getSponsoredFPCAddress(); + const sponsor = new SponsoredFeePaymentMethod(fpcAddress); + accountAddresses = await Promise.all( + wallets.map(async wallet => { + const secret = Fr.random(); + const salt = 
Fr.random(); + const address = await wallet.registerAccount(secret, salt); + await registerSponsoredFPC(wallet); + const manager = await AccountManager.create( + wallet, + secret, + new SchnorrAccountContract(deriveSigningKey(secret)), + salt, + ); + const deployMethod = await manager.getDeployMethod(); + await deployMethod.send({ + from: AztecAddress.ZERO, + fee: { paymentMethod: sponsor }, + wait: { timeout: 2400 }, + }); + logger.info(`Account deployed at ${address}`); + return address; + }), + ); + + // Deploy BenchmarkingContract using the first wallet + logger.info('Deploying benchmark contract...'); + benchmarkContract = await BenchmarkingContract.deploy(wallets[0]).send({ + from: accountAddresses[0], + fee: { paymentMethod: sponsor }, + }); + logger.info('BenchmarkingContract deployed', { address: benchmarkContract.address.toString() }); + + // Register benchmark contract with all other wallets + const benchMetadata = await wallets[0].getContractMetadata(benchmarkContract.address); + await Promise.all( + wallets.slice(1).map(wallet => wallet.registerContract(benchMetadata.instance!, BenchmarkingContract.artifact)), + ); + logger.info('Benchmark contract registered with all wallets'); + + // Deploy TokenContract using the first wallet + logger.info('Deploying token contract...'); + tokenContract = await TokenContract.deploy(wallets[0], accountAddresses[0], 'USDC', 'USD', 18n).send({ + from: accountAddresses[0], + fee: { paymentMethod: sponsor }, + wait: { timeout: 600 }, + }); + logger.info('TokenContract deployed', { address: tokenContract.address.toString() }); + + // Register token contract with all other wallets + const tokenMetadata = await wallets[0].getContractMetadata(tokenContract.address); + await Promise.all( + wallets.slice(1).map(wallet => wallet.registerContract(tokenMetadata.instance!, TokenContract.artifact)), + ); + logger.info('Token contract registered with all wallets'); + + // Mint tokens publicly to each account (enough for TX_COUNT 
transfers). + // Send sequentially to avoid PXE concurrency issues, then wait in parallel. + logger.info(`Minting ${TX_COUNT} tokens to each account...`); + const mintTxHashes = []; + for (const acc of accountAddresses) { + const txHash = await TokenContract.at(tokenContract.address, wallets[0]) + .methods.mint_to_public(acc, BigInt(TX_COUNT)) + .send({ from: accountAddresses[0], fee: { paymentMethod: sponsor }, wait: NO_WAIT }); + mintTxHashes.push(txHash); + } + await Promise.all(mintTxHashes.map(txHash => waitForTx(aztecNode, txHash, { timeout: 600 }))); + logger.info('Minting complete'); + }); + + afterAll(async () => { + // Restore original sequencer config + if (originalSequencerConfig?.[0]) { + logger.info('Restoring original sequencer config'); + await updateSequencersConfig(config, originalSequencerConfig[0]); + } + + if (testWallets) { + for (const tw of testWallets) { + await tw.cleanup(); + } + } + + logger.info('Cleanup complete'); + }); + + /** Creates and proves a single tx from a contract interaction. */ + async function createProvableTx( + wallet: WorkerWallet, + accountAddress: AztecAddress, + interaction: ContractFunctionInteraction, + ): Promise { + const sponsor = new SponsoredFeePaymentMethod(await getSponsoredFPCAddress()); + const options = { + from: accountAddress, + fee: { paymentMethod: sponsor, gasSettings: { maxPriorityFeesPerGas: GasFees.empty() } }, + }; + const execPayload = await interaction.request(options); + return wallet.proveTx(execPayload, toSendOptions(options)); + } + + /** Pre-proves TX_COUNT txs, either in parallel batches or by cloning a prototype. 
*/ + async function proveOrCloneTxs( + createPrototypeFn: (wallet: WorkerWallet, accountAddress: AztecAddress) => Promise, + ): Promise { + const txs: Tx[] = []; + if (config.REAL_VERIFIER || config.DEBUG_FORCE_TX_PROOF_VERIFICATION) { + for (let i = 0; i < TX_COUNT; i += NUM_WALLETS) { + const batchSize = Math.min(NUM_WALLETS, TX_COUNT - i); + const batchTxs = await Promise.all( + Array.from({ length: batchSize }, (_, j) => createPrototypeFn(wallets[j], accountAddresses[j])), + ); + txs.push(...batchTxs); + logger.info(`Proved ${txs.length}/${TX_COUNT} txs`); + } + } else { + const prototypeTx = await createPrototypeFn(wallets[0], accountAddresses[0]); + logger.info('Prototype tx proved, cloning...'); + for (let i = 0; i < TX_COUNT; i++) { + txs.push(await cloneTx(prototypeTx, aztecNode)); + if ((i + 1) % 10 === 0 || i === TX_COUNT - 1) { + logger.info(`Cloned ${i + 1}/${TX_COUNT} txs`); + } + } + } + return txs; + } + + /** Floods the mempool with pre-proven txs and measures block capacity. */ + async function floodAndMeasure(label: string, provenTxs: Tx[]): Promise { + // 1. Disable block building by setting minTxsPerBlock extremely high + logger.info(`[${label}] Disabling block building`); + await updateSequencersConfig(config, { minTxsPerBlock: 999_999_999 }); + await retryUntil( + async () => { + const configs = await getSequencersConfig(config); + return configs.every(c => c.minTxsPerBlock === 999_999_999); + }, + 'disable block building', + 60, + 1, + ); + logger.info(`[${label}] Block building disabled`); + + const blockBeforeFlood = await aztecNode.getBlockNumber(); + logger.info(`[${label}] Block number before flood`, { blockBeforeFlood }); + + // 2. 
Send all pre-proven txs to mempool + logger.info(`[${label}] Sending ${provenTxs.length} pre-proven txs to mempool`); + const sendStartMs = Date.now(); + + let sentCount = 0; + await asyncPool(3, provenTxs, async tx => { + await aztecNode.sendTx(tx); + sentCount++; + if (sentCount % 10 === 0 || sentCount === provenTxs.length) { + logger.info(`[${label}] Sent ${sentCount}/${provenTxs.length} txs`); + } + }); + + const sendDurationMs = Date.now() - sendStartMs; + logger.info(`[${label}] All ${provenTxs.length} txs sent to mempool`, { sendDurationMs }); + + // 3. Re-enable block building + logger.info(`[${label}] Re-enabling block building`); + await updateSequencersConfig(config, { minTxsPerBlock: 1, enforceTimeTable: true }); + await retryUntil( + async () => { + const configs = await getSequencersConfig(config); + return configs.every(c => c.minTxsPerBlock === 1); + }, + 'enable block building', + 60, + 1, + ); + logger.info(`[${label}] Block building re-enabled`); + + // 4. Wait for blocks and observe inclusion + let totalTxsMined = 0; + const blockTxCounts: { blockNumber: number; txCount: number }[] = []; + + const timeoutMs = 10 * 60 * 1000; // 10 minutes + + await retryUntil( + async () => { + const currentBlock = await aztecNode.getBlockNumber(); + for (let bn = blockBeforeFlood + 1; bn <= currentBlock; bn++) { + if (blockTxCounts.some(b => b.blockNumber === bn)) { + continue; + } + const block = await aztecNode.getBlock(BlockNumber(bn)); + if (block) { + const txCount = block.body.txEffects.length; + blockTxCounts.push({ blockNumber: bn, txCount }); + totalTxsMined += txCount; + logger.info(`[${label}] Block ${bn}: ${txCount} txs (total mined: ${totalTxsMined}/${TX_COUNT})`); + } + } + return totalTxsMined >= TX_COUNT; + }, + 'all txs mined', + timeoutMs / 1000, + 2, + ); + + // Log summary + logger.info(`=== Block Capacity Benchmark Results (${label}) ===`); + logger.info(`Total txs sent: ${TX_COUNT}`); + logger.info(`Total txs mined: ${totalTxsMined}`); + 
logger.info(`Blocks produced: ${blockTxCounts.length}`); + for (const { blockNumber, txCount } of blockTxCounts) { + logger.info(` Block ${blockNumber}: ${txCount} txs`); + } + + if (blockTxCounts.length > 0) { + const maxTxsInBlock = Math.max(...blockTxCounts.map(b => b.txCount)); + const avgTxsPerBlock = totalTxsMined / blockTxCounts.length; + logger.info(`Max txs in a single block: ${maxTxsInBlock}`); + logger.info(`Avg txs per block: ${avgTxsPerBlock.toFixed(1)}`); + } + + expect(totalTxsMined).toBeGreaterThanOrEqual(TX_COUNT); + } + + it('measures block capacity with private noop txs', async () => { + logger.info(`Pre-proving ${TX_COUNT} private noop txs...`); + const txs = await proveOrCloneTxs((wallet, addr) => { + const contract = BenchmarkingContract.at(benchmarkContract.address, wallet); + return createProvableTx(wallet, addr, contract.methods.noop()); + }); + logger.info(`All ${TX_COUNT} private noop txs pre-proven`); + await floodAndMeasure('private noop', txs); + }); + + it('measures block capacity with public noop txs', async () => { + logger.info(`Pre-proving ${TX_COUNT} public noop txs...`); + const txs = await proveOrCloneTxs((wallet, addr) => { + const contract = BenchmarkingContract.at(benchmarkContract.address, wallet); + return createProvableTx(wallet, addr, contract.methods.noop_pub()); + }); + logger.info(`All ${TX_COUNT} public noop txs pre-proven`); + await floodAndMeasure('public noop', txs); + }); + + it.only('measures block capacity with public token transfers', async () => { + // Each account transfers 1 token to a "sink" address. + // Note: For the clone path, all cloned txs share the same sender/recipient/amount. + // Public state conflicts may cause some cloned txs to fail during execution. 
+ const recipient = accountAddresses[0]; + logger.info(`Pre-proving ${TX_COUNT} public token transfer txs...`); + const txs = await proveOrCloneTxs((wallet, addr) => { + const token = TokenContract.at(tokenContract.address, wallet); + return createProvableTx(wallet, addr, token.methods.transfer_in_public(addr, recipient, 1n, 0)); + }); + logger.info(`All ${TX_COUNT} public token transfer txs pre-proven`); + await floodAndMeasure('public token transfer', txs); + }); +}); + +/** Clones a proven tx, randomizing nullifiers and updating fees so each clone is unique. */ +async function cloneTx(tx: Tx, aztecNode: AztecNode): Promise { + const clonedTx = Tx.clone(tx, false); + + // Fetch current minimum fees and apply 50% buffer for safety + const currentFees = await aztecNode.getCurrentMinFees(); + const paddedFees = currentFees.mul(1.5); + + // Update gas settings with current fees + (clonedTx.data.constants.txContext.gasSettings as any).maxFeesPerGas = paddedFees; + + // Randomize nullifiers to avoid conflicts + if (clonedTx.data.forRollup) { + for (let i = 0; i < clonedTx.data.forRollup.end.nullifiers.length; i++) { + if (clonedTx.data.forRollup.end.nullifiers[i].isZero()) { + continue; + } + clonedTx.data.forRollup.end.nullifiers[i] = Fr.random(); + } + } else if (clonedTx.data.forPublic) { + for (let i = 0; i < clonedTx.data.forPublic.nonRevertibleAccumulatedData.nullifiers.length; i++) { + if (clonedTx.data.forPublic.nonRevertibleAccumulatedData.nullifiers[i].isZero()) { + continue; + } + clonedTx.data.forPublic.nonRevertibleAccumulatedData.nullifiers[i] = Fr.random(); + } + } + + await clonedTx.recomputeHash(); + return clonedTx; +} diff --git a/yarn-project/end-to-end/src/spartan/utils/config.ts b/yarn-project/end-to-end/src/spartan/utils/config.ts index 3794cba69156..b93637df7429 100644 --- a/yarn-project/end-to-end/src/spartan/utils/config.ts +++ b/yarn-project/end-to-end/src/spartan/utils/config.ts @@ -8,6 +8,7 @@ const logger = createLogger('e2e:k8s-utils'); 
const testConfigSchema = z.object({ NAMESPACE: z.string().default('scenario'), REAL_VERIFIER: schemas.Boolean.optional().default(true), + DEBUG_FORCE_TX_PROOF_VERIFICATION: schemas.Boolean.optional().default(true), CREATE_ETH_DEVNET: schemas.Boolean.optional().default(false), L1_RPC_URLS_JSON: z.string().optional(), L1_ACCOUNT_MNEMONIC: z.string().optional(), diff --git a/yarn-project/end-to-end/src/test-wallet/wallet_worker_script.ts b/yarn-project/end-to-end/src/test-wallet/wallet_worker_script.ts index 820c1c402e95..2b422dfee6ce 100644 --- a/yarn-project/end-to-end/src/test-wallet/wallet_worker_script.ts +++ b/yarn-project/end-to-end/src/test-wallet/wallet_worker_script.ts @@ -1,5 +1,6 @@ import { createAztecNodeClient } from '@aztec/aztec.js/node'; import { jsonStringify } from '@aztec/foundation/json-rpc'; +import { createLogger } from '@aztec/foundation/log'; import type { ApiSchema } from '@aztec/foundation/schemas'; import { parseWithOptionals, schemaHasMethod } from '@aztec/foundation/schemas'; import { NodeListener, TransportServer } from '@aztec/foundation/transport'; @@ -9,35 +10,44 @@ import { workerData } from 'worker_threads'; import { TestWallet } from './test_wallet.js'; import { WorkerWalletSchema } from './worker_wallet_schema.js'; -const { nodeUrl, pxeConfig } = workerData as { nodeUrl: string; pxeConfig?: Record }; +const logger = createLogger('e2e:test-wallet:worker'); -const node = createAztecNodeClient(nodeUrl); -const wallet = await TestWallet.create(node, pxeConfig); +try { + const { nodeUrl, pxeConfig } = workerData as { nodeUrl: string; pxeConfig?: Record }; -/** Handlers for methods that need custom implementation (not direct wallet passthrough). */ -const handlers: Record Promise> = { - proveTx: async (exec, opts) => { - const provenTx = await wallet.proveTx(exec, opts); - // ProvenTx has non-serializable fields (node proxy, etc.) 
— extract only Tx-compatible fields - const { data, chonkProof, contractClassLogFields, publicFunctionCalldata } = provenTx; - return { data, chonkProof, contractClassLogFields, publicFunctionCalldata }; - }, - registerAccount: async (secret, salt) => { - const manager = await wallet.createSchnorrAccount(secret, salt); - return manager.address; - }, -}; + logger.info('Initializing worker wallet', { nodeUrl }); + const node = createAztecNodeClient(nodeUrl); + const wallet = await TestWallet.create(node, pxeConfig); + logger.info('Worker wallet initialized'); -const schema = WorkerWalletSchema as ApiSchema; -const listener = new NodeListener(); -const server = new TransportServer<{ fn: string; args: string }>(listener, async msg => { - if (!schemaHasMethod(schema, msg.fn)) { - throw new Error(`Unknown method: ${msg.fn}`); - } - const jsonParams = JSON.parse(msg.args) as unknown[]; - const args = await parseWithOptionals(jsonParams, schema[msg.fn].parameters()); - const handler = handlers[msg.fn]; - const result = handler ? await handler(...args) : await (wallet as any)[msg.fn](...args); - return jsonStringify(result); -}); -server.start(); + /** Handlers for methods that need custom implementation (not direct wallet passthrough). */ + const handlers: Record Promise> = { + proveTx: async (exec, opts) => { + const provenTx = await wallet.proveTx(exec, opts); + // ProvenTx has non-serializable fields (node proxy, etc.) 
— extract only Tx-compatible fields + const { data, chonkProof, contractClassLogFields, publicFunctionCalldata } = provenTx; + return { data, chonkProof, contractClassLogFields, publicFunctionCalldata }; + }, + registerAccount: async (secret, salt) => { + const manager = await wallet.createSchnorrAccount(secret, salt); + return manager.address; + }, + }; + + const schema = WorkerWalletSchema as ApiSchema; + const listener = new NodeListener(); + const server = new TransportServer<{ fn: string; args: string }>(listener, async msg => { + if (!schemaHasMethod(schema, msg.fn)) { + throw new Error(`Unknown method: ${msg.fn}`); + } + const jsonParams = JSON.parse(msg.args) as unknown[]; + const args = await parseWithOptionals(jsonParams, schema[msg.fn].parameters()); + const handler = handlers[msg.fn]; + const result = handler ? await handler(...args) : await (wallet as any)[msg.fn](...args); + return jsonStringify(result); + }); + server.start(); +} catch (err: unknown) { + logger.error('Worker wallet initialization failed', { error: err instanceof Error ? 
err.stack : String(err) }); + process.exit(1); +} diff --git a/yarn-project/end-to-end/src/test-wallet/worker_wallet.ts b/yarn-project/end-to-end/src/test-wallet/worker_wallet.ts index d5f8b34c591b..e9e2aaed518f 100644 --- a/yarn-project/end-to-end/src/test-wallet/worker_wallet.ts +++ b/yarn-project/end-to-end/src/test-wallet/worker_wallet.ts @@ -19,7 +19,9 @@ import type { import type { ChainInfo } from '@aztec/entrypoints/interfaces'; import type { Fr } from '@aztec/foundation/curves/bn254'; import { jsonStringify } from '@aztec/foundation/json-rpc'; +import { createLogger } from '@aztec/foundation/log'; import type { ApiSchema } from '@aztec/foundation/schemas'; +import { sleep } from '@aztec/foundation/sleep'; import { NodeConnector, TransportClient } from '@aztec/foundation/transport'; import type { PXEConfig } from '@aztec/pxe/config'; import type { ContractArtifact, EventMetadataDefinition, FunctionCall } from '@aztec/stdlib/abi'; @@ -35,6 +37,10 @@ import { WorkerWalletSchema } from './worker_wallet_schema.js'; type WorkerMsg = { fn: string; args: string }; +const log = createLogger('e2e:test-wallet:worker-wallet'); + +const WORKER_READY_TIMEOUT_MS = 120_000; + /** * Wallet implementation that offloads all work to a worker thread. * Implements the Wallet interface by proxying calls over a transport layer @@ -53,8 +59,18 @@ export class WorkerWallet implements Wallet { * @returns A WorkerWallet ready to use. */ static async create(nodeUrl: string, pxeConfig?: Partial): Promise { - const worker = new Worker(new URL('./wallet_worker_script.js', import.meta.url), { + // When running under Jest, import.meta.url points to src/ but compiled JS is in dest/ + const workerUrl = new URL('./wallet_worker_script.js', import.meta.url); + workerUrl.pathname = workerUrl.pathname.replace('/src/', '/dest/'); + // Strip JEST_WORKER_ID so the worker uses pino-pretty transport instead of Jest's raw output. 
+ const { JEST_WORKER_ID: _, ...parentEnv } = process.env; + const worker = new Worker(workerUrl, { workerData: { nodeUrl, pxeConfig }, + env: { + ...parentEnv, + ...(process.stderr.isTTY || process.env.FORCE_COLOR ? { FORCE_COLOR: '1' } : {}), + LOG_LEVEL: process.env.WORKER_LOG_LEVEL ?? 'info', + }, }); const connector = new NodeConnector(worker); @@ -62,8 +78,40 @@ export class WorkerWallet implements Wallet { await client.open(); const wallet = new WorkerWallet(worker, client); - // Warmup / readiness check — blocks until the worker has finished creating the TestWallet. - await wallet.getChainInfo(); + + // Reject if the worker exits or errors before the warmup completes. + let onError: ((err: Error) => void) | undefined; + let onExit: ((code: number) => void) | undefined; + const workerDied = new Promise((_resolve, reject) => { + onError = (err: Error) => { + worker.off('exit', onExit!); + reject(new Error(`Worker wallet thread error: ${err.message}`)); + }; + onExit = (code: number) => { + worker.off('error', onError!); + reject(new Error(`Worker wallet thread exited with code ${code} before becoming ready`)); + }; + worker.once('error', onError); + worker.once('exit', onExit); + }); + + const timeout = sleep(WORKER_READY_TIMEOUT_MS).then(() => { + throw new Error(`Worker wallet creation timed out after ${WORKER_READY_TIMEOUT_MS / 1000}s`); + }); + + try { + // Warmup / readiness check — blocks until the worker has finished creating the TestWallet. 
+ await Promise.race([wallet.getChainInfo(), workerDied, timeout]); + } catch (err) { + log.error('Worker wallet creation failed, cleaning up', { error: String(err) }); + client.close(); + await worker.terminate(); + throw err; + } finally { + worker.off('error', onError!); + worker.off('exit', onExit!); + } + return wallet; } diff --git a/yarn-project/foundation/src/transport/transport_client.ts b/yarn-project/foundation/src/transport/transport_client.ts index e1aa0260e811..fb2b98f86ddf 100644 --- a/yarn-project/foundation/src/transport/transport_client.ts +++ b/yarn-project/foundation/src/transport/transport_client.ts @@ -91,7 +91,7 @@ export class TransportClient extends EventEmitter { } const msgId = this.msgId++; const msg = { msgId, payload }; - log.debug(format(`->`, msg)); + log.trace(format(`->`, msg)); return new Promise((resolve, reject) => { this.pendingRequests.push({ resolve, reject, msgId }); this.socket!.send(msg, transfer).catch(reject); @@ -111,7 +111,7 @@ export class TransportClient extends EventEmitter { this.close(); return; } - log.debug(format(`<-`, msg)); + log.trace(format(`<-`, msg)); if (isEventMessage(msg)) { this.emit('event_msg', msg.payload); return; From 547defee798e55f920e12cba779e2c9039476d03 Mon Sep 17 00:00:00 2001 From: sirasistant Date: Thu, 19 Feb 2026 15:51:18 +0000 Subject: [PATCH 2/4] add fns for other sideffects --- .../test/benchmarking_contract/src/main.nr | 43 ++++++++++++++++++- 1 file changed, 42 insertions(+), 1 deletion(-) diff --git a/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr index 0310e2752b1c..e368bc8fca1d 100644 --- a/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr @@ -11,7 +11,14 @@ pub contract Benchmarking { macros::{functions::external, storage::storage}, 
messages::message_delivery::MessageDelivery, note::note_getter_options::NoteGetterOptions, - protocol::address::AztecAddress, + oracle::random::random, + protocol::{ + address::{AztecAddress, EthAddress}, + constants::{ + MAX_L2_TO_L1_MSGS_PER_CALL, MAX_NOTE_HASHES_PER_CALL, MAX_NULLIFIERS_PER_CALL, + MAX_PRIVATE_LOGS_PER_CALL, PRIVATE_LOG_SIZE_IN_FIELDS, + }, + }, state_vars::{Map, Owned, PrivateSet, PublicMutable}, }; use field_note::FieldNote; @@ -66,6 +73,40 @@ pub contract Benchmarking { #[external("private")] fn noop() {} + #[external("private")] + fn emit_nullifiers() { + // Safety: Benchmarking code + let random_seed = unsafe { random() }; + for i in 0..MAX_NULLIFIERS_PER_CALL { + self.context.push_nullifier(random_seed + (i as Field)); + } + } + + #[external("private")] + fn emit_note_hashes() { + for i in 0..MAX_NOTE_HASHES_PER_CALL { + self.context.push_note_hash(0xdeadbeef + (i as Field)); + } + } + + #[external("private")] + fn emit_l2_to_l1_msgs() { + for i in 0..MAX_L2_TO_L1_MSGS_PER_CALL { + self.context.message_portal(EthAddress::from_field(i as Field), (i + 1) as Field); + } + } + + #[external("private")] + fn emit_private_logs() { + for i in 0..MAX_PRIVATE_LOGS_PER_CALL { + let mut log = [0; PRIVATE_LOG_SIZE_IN_FIELDS]; + for j in 0..PRIVATE_LOG_SIZE_IN_FIELDS { + log[i] = 0xdeadbeef + (i * MAX_PRIVATE_LOGS_PER_CALL + j) as Field; + } + self.context.emit_private_log(log, PRIVATE_LOG_SIZE_IN_FIELDS); + } + } + // Lightest possible private transaction: empty app circuit, no state changes, no public calls. 
#[external("public")] fn noop_pub() {} From 8e507dc9a1c5ca6fb75a08e5c9adcc9738fdc5e3 Mon Sep 17 00:00:00 2001 From: sirasistant Date: Thu, 19 Feb 2026 16:08:29 +0000 Subject: [PATCH 3/4] add contract class and use seeds everywhere just in case --- .../test/benchmarking_contract/src/main.nr | 35 ++++++++++++++++--- 1 file changed, 30 insertions(+), 5 deletions(-) diff --git a/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr index e368bc8fca1d..8ac7ae99e582 100644 --- a/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr @@ -15,8 +15,9 @@ pub contract Benchmarking { protocol::{ address::{AztecAddress, EthAddress}, constants::{ - MAX_L2_TO_L1_MSGS_PER_CALL, MAX_NOTE_HASHES_PER_CALL, MAX_NULLIFIERS_PER_CALL, - MAX_PRIVATE_LOGS_PER_CALL, PRIVATE_LOG_SIZE_IN_FIELDS, + CONTRACT_CLASS_LOG_SIZE_IN_FIELDS, MAX_L2_TO_L1_MSGS_PER_CALL, + MAX_NOTE_HASHES_PER_CALL, MAX_NULLIFIERS_PER_CALL, MAX_PRIVATE_LOGS_PER_CALL, + PRIVATE_LOG_SIZE_IN_FIELDS, }, }, state_vars::{Map, Owned, PrivateSet, PublicMutable}, @@ -84,29 +85,53 @@ pub contract Benchmarking { #[external("private")] fn emit_note_hashes() { + // Safety: Benchmarking code + let random_seed = unsafe { random() }; + for i in 0..MAX_NOTE_HASHES_PER_CALL { - self.context.push_note_hash(0xdeadbeef + (i as Field)); + self.context.push_note_hash(random_seed + (i as Field)); } } #[external("private")] fn emit_l2_to_l1_msgs() { + // Safety: Benchmarking code + let random_seed = unsafe { random() }; + for i in 0..MAX_L2_TO_L1_MSGS_PER_CALL { - self.context.message_portal(EthAddress::from_field(i as Field), (i + 1) as Field); + self.context.message_portal( + EthAddress::from_field(random_seed + (i as Field)), + random_seed + (i + 1) as Field, + ); } } #[external("private")] fn emit_private_logs() { + // Safety: 
Benchmarking code
+        let random_seed = unsafe { random() };
+
         for i in 0..MAX_PRIVATE_LOGS_PER_CALL {
             let mut log = [0; PRIVATE_LOG_SIZE_IN_FIELDS];
             for j in 0..PRIVATE_LOG_SIZE_IN_FIELDS {
-                log[i] = 0xdeadbeef + (i * MAX_PRIVATE_LOGS_PER_CALL + j) as Field;
+                log[j] = random_seed + (i * PRIVATE_LOG_SIZE_IN_FIELDS + j) as Field;
             }
             self.context.emit_private_log(log, PRIVATE_LOG_SIZE_IN_FIELDS);
         }
     }
 
+    #[external("private")]
+    fn emit_contract_class_log() {
+        // Safety: Benchmarking code
+        let random_seed = unsafe { random() };
+
+        let mut log = [0; CONTRACT_CLASS_LOG_SIZE_IN_FIELDS];
+        for i in 0..log.len() {
+            log[i] = random_seed + (i as Field);
+        }
+        self.context.emit_contract_class_log(log);
+    }
+
     // Lightest possible private transaction: empty app circuit, no state changes, no public calls.
     #[external("public")]
     fn noop_pub() {}

From e427de5881ec506a3d7b8881ab2e16c350bff5fc Mon Sep 17 00:00:00 2001
From: sirasistant
Date: Thu, 19 Feb 2026 16:41:35 +0000
Subject: [PATCH 4/4] fix

---
 .../contracts/test/benchmarking_contract/src/main.nr          | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr
index 8ac7ae99e582..c48ddf69888e 100644
--- a/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr
+++ b/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr
@@ -99,10 +99,8 @@ pub contract Benchmarking {
         let random_seed = unsafe { random() };
 
         for i in 0..MAX_L2_TO_L1_MSGS_PER_CALL {
-            self.context.message_portal(
-                EthAddress::from_field(random_seed + (i as Field)),
-                random_seed + (i + 1) as Field,
-            );
+            let recipient = EthAddress::from_field((random_seed as u128) as Field + (i as Field));
+            self.context.message_portal(recipient, random_seed + (i + 1) as Field);
         }
     }