From 93b8be23601c0208e4cb9cd7b6f6cce779fea820 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 3 Mar 2026 09:18:36 +0000 Subject: [PATCH 01/37] test: update proving-real test to mbps (#20991) . --- spartan/environments/prove-n-tps-real.env | 5 +- .../src/spartan/n_tps_prove.test.ts | 100 ++++++++++++------ 2 files changed, 70 insertions(+), 35 deletions(-) diff --git a/spartan/environments/prove-n-tps-real.env b/spartan/environments/prove-n-tps-real.env index 55e5bd30fab0..4be76065b0ca 100644 --- a/spartan/environments/prove-n-tps-real.env +++ b/spartan/environments/prove-n-tps-real.env @@ -36,8 +36,11 @@ PROVER_PUBLISHER_MNEMONIC_START_INDEX=8000 PROVER_AGENT_POLL_INTERVAL_MS=10000 PUBLISHERS_PER_PROVER=1 -SEQ_MAX_TX_PER_BLOCK=80 +SEQ_MAX_TX_PER_BLOCK=18 SEQ_MIN_TX_PER_BLOCK=0 +SEQ_BLOCK_DURATION_MS=6000 +SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT=36 +SEQ_BUILD_CHECKPOINT_IF_EMPTY=true P2P_MAX_TX_POOL_SIZE=1000000000 DEBUG_P2P_INSTRUMENT_MESSAGES=true diff --git a/yarn-project/end-to-end/src/spartan/n_tps_prove.test.ts b/yarn-project/end-to-end/src/spartan/n_tps_prove.test.ts index 5c1750c41b24..0e20ccefc471 100644 --- a/yarn-project/end-to-end/src/spartan/n_tps_prove.test.ts +++ b/yarn-project/end-to-end/src/spartan/n_tps_prove.test.ts @@ -8,13 +8,13 @@ import { RollupCheatCodes } from '@aztec/aztec/testing'; import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; import { EthCheatCodesWithState } from '@aztec/ethereum/test'; import { SlotNumber } from '@aztec/foundation/branded-types'; -import { timesAsync } from '@aztec/foundation/collection'; +import { timesParallel } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { retryUntil } from '@aztec/foundation/retry'; import { sleep } from '@aztec/foundation/sleep'; -import { DateProvider } from '@aztec/foundation/timer'; -import { BenchmarkingContract } from 
'@aztec/noir-test-contracts.js/Benchmarking'; +import { DateProvider, Timer } from '@aztec/foundation/timer'; +import { AvmGadgetsTestContract } from '@aztec/noir-test-contracts.js/AvmGadgetsTest'; import { GasFees } from '@aztec/stdlib/gas'; import { deriveSigningKey } from '@aztec/stdlib/keys'; import { Tx, TxHash } from '@aztec/stdlib/tx'; @@ -122,7 +122,7 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { let producerPromises: Promise[]; let aztecNode: AztecNode; - let benchmarkContract: BenchmarkingContract; + let benchmarkContract: AvmGadgetsTestContract; let metrics: ProvingMetrics; let childProcesses: ChildProcess[]; @@ -269,7 +269,7 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { ); logger.info(`Creating ${NUM_WALLETS} wallet(s)...`); - testWallets = await timesAsync(NUM_WALLETS, i => { + testWallets = await timesParallel(NUM_WALLETS, i => { logger.info(`Creating wallet ${i + 1}/${NUM_WALLETS}`); return createWorkerWalletClient(rpcUrl, config.REAL_VERIFIER, logger); }); @@ -278,33 +278,31 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { // Register FPC and create/deploy accounts const fpcAddress = await getSponsoredFPCAddress(); const sponsor = new SponsoredFeePaymentMethod(fpcAddress); - accountAddresses = []; - for (const wallet of wallets) { - const secret = Fr.random(); - const salt = Fr.random(); - // Register account inside worker (populates TestWallet.accounts map) - const address = await wallet.registerAccount(secret, salt); - // Register FPC in worker's PXE - await registerSponsoredFPC(wallet); - // Deploy via standard AccountManager flow (from: ZERO -> SignerlessAccount, no account lookup) - const manager = await AccountManager.create( - wallet, - secret, - new SchnorrAccountContract(deriveSigningKey(secret)), - salt, - ); - const deployMethod = await manager.getDeployMethod(); - await deployMethod.send({ - from: AztecAddress.ZERO, - fee: { paymentMethod: sponsor }, - wait: { timeout: 2400 }, - }); - logger.info(`Account deployed at 
${address}`); - accountAddresses.push(address); - } + accountAddresses = await Promise.all( + wallets.map(async wallet => { + const secret = Fr.random(); + const salt = Fr.random(); + const address = await wallet.registerAccount(secret, salt); + await registerSponsoredFPC(wallet); + const manager = await AccountManager.create( + wallet, + secret, + new SchnorrAccountContract(deriveSigningKey(secret)), + salt, + ); + const deployMethod = await manager.getDeployMethod(); + await deployMethod.send({ + from: AztecAddress.ZERO, + fee: { paymentMethod: sponsor }, + wait: { timeout: 2400 }, + }); + logger.info(`Account deployed at ${address}`); + return address; + }), + ); logger.info('Deploying benchmark contract...'); - benchmarkContract = await BenchmarkingContract.deploy(wallets[0]).send({ + benchmarkContract = await AvmGadgetsTestContract.deploy(wallets[0]).send({ from: accountAddresses[0], fee: { paymentMethod: sponsor }, }); @@ -365,6 +363,14 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { await scaleProverAgents(config.NAMESPACE, 10, logger); }); + afterAll(async () => { + try { + await scaleProverAgents(config.NAMESPACE, 2, logger); + } catch (err) { + logger.error(`Failed to scale prover agents: ${err}`); + } + }); + it(`sends ${TARGET_TPS} TPS for a full epoch and waits for proof`, async () => { const [testEpoch, startSlot, { proven: startProvenBlockNumber, pending: startBlockNumber }] = await Promise.all([ rollupCheatCodes.getEpoch(), @@ -432,6 +438,8 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { let failureCount = 0; const batchSize = 10; + const TX_MINING_TIMEOUT_S = epochDurationSeconds; + const miningTimer = new Timer(); while (pendingTxs.size > 0) { const entries = [...pendingTxs.entries()]; const start = Math.floor(Math.random() * Math.max(1, entries.length - batchSize + 1)); @@ -464,6 +472,22 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { ); } + if (miningTimer.s() > TX_MINING_TIMEOUT_S) { + const remainingHashes = 
[...pendingTxs.values()].map(h => h.toString()); + logger.warn( + `Timed out waiting for ${pendingTxs.size}/${totalSent} transactions after ${TX_MINING_TIMEOUT_S}s. ` + + `These transactions likely were not included in this epoch's blocks. ` + + `Remaining tx hashes: ${remainingHashes.join(', ')}`, + ); + break; + } + + if (processedCount === 0) { + logger.info( + `Still waiting for ${pendingTxs.size}/${totalSent} transactions (${Math.floor(miningTimer.s())}s elapsed)`, + ); + } + await sleep(500); } @@ -483,6 +507,8 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { // Poll for proof completion while detecting reorgs let lastBlockNumber = endBlockNumber; + const PROOF_TIMEOUT_S = epochDurationSeconds; + const proofTimer = new Timer(); while (true) { const [provenBlock, currentBlockNumber] = await Promise.all([ @@ -507,7 +533,13 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { break; } - logger.debug(`Proven: ${provenBlock}, Pending: ${currentBlockNumber}, Target: ${targetProvenBlock}`); + if (proofTimer.s() > PROOF_TIMEOUT_S) { + throw new Error( + `Timed out waiting for proof after ${PROOF_TIMEOUT_S}s. 
Proven: ${provenBlock}, Target: ${targetProvenBlock}`, + ); + } + + logger.info(`Proven: ${provenBlock}, Pending: ${currentBlockNumber}, Target: ${targetProvenBlock}`); lastBlockNumber = currentBlockNumber; await sleep(10 * 1000); // Poll every 10 seconds @@ -530,7 +562,7 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { async function createTx( wallet: WorkerWallet, accountAddress: AztecAddress, - benchmarkContract: BenchmarkingContract, + benchmarkContract: AvmGadgetsTestContract, logger: Logger, ): Promise { logger.info('Creating prototype transaction...'); @@ -539,7 +571,7 @@ async function createTx( from: accountAddress, fee: { paymentMethod: sponsor, gasSettings: { maxPriorityFeesPerGas: GasFees.empty() } }, }; - const interaction = benchmarkContract.methods.sha256_hash_1024(Array(1024).fill(42)); + const interaction = benchmarkContract.methods.keccak_hash_1400(Array(1400).fill(42)); const execPayload = await interaction.request(options); const tx = await wallet.proveTx(execPayload, toSendOptions(options)); logger.info('Prototype transaction created'); @@ -579,7 +611,7 @@ async function cloneTx(tx: Tx, aztecNode: AztecNode): Promise { async function startProducing( producer: WalletTxProducer, - benchmarkContract: BenchmarkingContract, + benchmarkContract: AvmGadgetsTestContract, aztecNode: AztecNode, signal: AbortSignal, logger: Logger, From 88ea0a933becc1827c2644c888d592e2f6dbb448 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 3 Mar 2026 09:41:51 +0000 Subject: [PATCH 02/37] chore: epoch proving log analyzer (#21033) A script to analyze the proving timeline of an epoch. 
Output example: ``` $ ./spartan/scripts/extract_proving_metrics.ts prove-n-tps-real --start 2026-03-02 --epoch 3 Epoch 3 stats: Checkpoints: 32 (33 to 64), Blocks: 256 (40 to 295), Txs: 2283 Blob fields per checkpoint: 211.92ms Blob batching: 3408.91ms Timeline: Epoch started proving T+0s Blocks started processing T+1m 3s PUBLIC_CHONK_VERIFIER first enqueued (2283 jobs) T+1m 9s PARITY_BASE first enqueued (1 jobs) T+1m 9s # ... Proving jobs by stage: PARITY_BASE 1 jobs enqueued T+1m 9s completed T+1m 22s (13s) PARITY_ROOT 1 jobs enqueued T+1m 23s completed T+7m 1s (5m 38s) PUBLIC_CHONK_VERIFIER 2283 jobs enqueued T+1m 9s..T+1m 16s completed T+1m 21s..T+6m 55s (5m 46s) # ... Per-job duration stats: Type Count Median Mean p90 Max PARITY_BASE 1 13.4s 13.4s 13.4s 13.4s PARITY_ROOT 1 338.3s 338.3s 338.3s 338.3s # ... Per block (sorted by block number): Block 40 (slot 96): 18 txs, processing 29.4s Block 41 (slot 96): 18 txs, processing 44.8s # ... ``` --- spartan/scripts/extract_proving_metrics.ts | 852 +++++++++++++++++++++ 1 file changed, 852 insertions(+) create mode 100755 spartan/scripts/extract_proving_metrics.ts diff --git a/spartan/scripts/extract_proving_metrics.ts b/spartan/scripts/extract_proving_metrics.ts new file mode 100755 index 000000000000..46d1d01ae966 --- /dev/null +++ b/spartan/scripts/extract_proving_metrics.ts @@ -0,0 +1,852 @@ +#!/usr/bin/env -S node --experimental-strip-types --no-warnings +/** + * Extract proving metrics from GCP Cloud Logging for a prover node. 
+ * + * Usage: + * ./extract_proving_metrics.ts --start [--end ] [--epoch ] [--project ] [--pod ] + * + * Examples: + * # Auto-detect first epoch with >=1 tx after the given start time: + * ./extract_proving_metrics.ts prove-n-tps-real --start 2026-03-01T19:00:00Z + * + * # Specify epoch number: + * ./extract_proving_metrics.ts prove-n-tps-real --start 2026-03-01T19:00:00Z --epoch 3 + * + * # Explicit time range (no auto-detection): + * ./extract_proving_metrics.ts prove-n-tps-real --start 2026-03-01T19:58:00Z --end 2026-03-01T20:25:00Z + */ + +import { exec } from "node:child_process"; +import { promisify } from "node:util"; + +const execAsync = promisify(exec); + +// ── CLI arg parsing ────────────────────────────────────────────────────────── + +function parseArgs(argv: string[]): { + namespace: string; + start: string; + end: string; + epoch: number | undefined; + project: string; + pod: string; +} { + const args = argv.slice(2); + const positional: string[] = []; + const flags: Record = {}; + + for (let i = 0; i < args.length; i++) { + if (args[i].startsWith("--")) { + const key = args[i].slice(2); + if (i + 1 < args.length && !args[i + 1].startsWith("--")) { + flags[key] = args[++i]; + } else { + flags[key] = "true"; + } + } else { + positional.push(args[i]); + } + } + + const namespace = positional[0]; + if (!namespace) { + console.error( + "Usage: extract_proving_metrics.ts --start [--end ] [--epoch ] [--project ] [--pod ]", + ); + process.exit(1); + } + if (!flags.start) { + console.error("Error: --start is required (ISO 8601 timestamp)"); + process.exit(1); + } + + // Default end: now + const defaultEnd = new Date().toISOString(); + + return { + namespace, + start: flags.start, + end: flags.end || defaultEnd, + epoch: flags.epoch !== undefined ? 
parseInt(flags.epoch) : undefined, + project: flags.project || "testnet-440309", + pod: flags.pod || `${namespace}-prover-node-0`, + }; +} + +const config = parseArgs(process.argv); + +// ── GCP log query helpers ──────────────────────────────────────────────────── + +interface LogEntry { + timestamp: string; + jsonPayload?: { + message?: string; + [key: string]: any; + }; + [key: string]: any; +} + +function buildFilter( + textFilter: string, + opts?: { module?: string; pod?: string }, +): string { + const pod = opts?.pod ?? config.pod; + let filter = + `resource.type="k8s_container"` + + ` AND resource.labels.namespace_name="${config.namespace}"` + + ` AND resource.labels.pod_name="${pod}"` + + ` AND timestamp>="${config.start}"` + + ` AND timestamp<="${config.end}"` + + ` AND jsonPayload.message=~"${textFilter}"`; + if (opts?.module) { + filter += ` AND jsonPayload.module="${opts.module}"`; + } + return filter; +} + +async function queryLogs( + name: string, + textFilter: string, + opts?: { module?: string; pod?: string }, +): Promise { + const filter = buildFilter(textFilter, opts); + const cmd = [ + "gcloud", + "logging", + "read", + JSON.stringify(filter), + `--project=${config.project}`, + "--format=json", + `--freshness=7d`, + ].join(" "); + + process.stderr.write(` Querying: ${name}...\n`); + try { + const { stdout } = await execAsync(cmd, { maxBuffer: 50 * 1024 * 1024 }); + const entries: LogEntry[] = JSON.parse(stdout || "[]"); + process.stderr.write(` ${name}: ${entries.length} entries\n`); + return entries; + } catch (err: any) { + process.stderr.write(` ${name}: ERROR - ${err.message?.split("\n")[0]}\n`); + return []; + } +} + +// ── Epoch auto-detection ───────────────────────────────────────────────────── + +async function scanForEpoch(): Promise<{ start: string; end: string }> { + process.stderr.write( + `Scanning for epoch in ${config.start} to ${config.end}...\n\n`, + ); + + const [epochStarts, epochFinalized] = await Promise.all([ + 
queryLogs("scan-epoch-starts", "Starting epoch.*proving job"), + queryLogs("scan-epoch-finalized", "Finalized proof for epoch"), + ]); + + process.stderr.write("\n"); + + // Parse all epoch start entries + const starts: { + epoch: number; + txCount: number; + timestamp: string; + }[] = []; + for (const entry of epochStarts) { + const m = msg(entry); + const p = entry.jsonPayload || {}; + const epochMatch = m.match( + /Starting epoch (\d+).*checkpoints (\d+) to (\d+)/, + ); + if (epochMatch) { + starts.push({ + epoch: parseInt(epochMatch[1]), + txCount: p.epochSizeTxs ?? 0, + timestamp: entry.timestamp, + }); + } + } + + // Sort by timestamp ascending + starts.sort((a, b) => a.timestamp.localeCompare(b.timestamp)); + + // Pick target epoch + let target: (typeof starts)[0] | undefined; + if (config.epoch !== undefined) { + target = starts.find((s) => s.epoch === config.epoch); + if (!target) { + process.stderr.write( + `Warning: epoch ${config.epoch} not found in scan window. Using full window.\n`, + ); + return { start: config.start, end: config.end }; + } + } else { + target = starts.find((s) => s.txCount >= 1); + if (!target) { + process.stderr.write( + `Warning: no epoch with >=1 tx found in scan window. 
Using full window.\n`, + ); + return { start: config.start, end: config.end }; + } + } + + process.stderr.write( + `Found epoch ${target.epoch} (${target.txCount} txs) at ${target.timestamp}\n`, + ); + + // Find matching finalized entry + const finalized = epochFinalized.find((entry) => { + const m = msg(entry); + const match = m.match(/Finalized proof for epoch (\d+)/); + return match && parseInt(match[1]) === target.epoch; + }); + + // Epoch start timestamp minus a few seconds to capture all leading logs + const narrowedStart = new Date( + new Date(target.timestamp).getTime() - 5000, + ).toISOString(); + + let narrowedEnd: string; + if (finalized) { + // Pad 60s after finalized to capture trailing logs + narrowedEnd = new Date( + new Date(finalized.timestamp).getTime() + 60000, + ).toISOString(); + process.stderr.write( + `Epoch ${target.epoch} finalized at ${finalized.timestamp}\n`, + ); + } else { + narrowedEnd = config.end; + process.stderr.write( + `Epoch ${target.epoch} finalized entry not found, using scan window end.\n`, + ); + } + + process.stderr.write( + `Narrowed window: ${narrowedStart} to ${narrowedEnd}\n\n`, + ); + + return { start: narrowedStart, end: narrowedEnd }; +} + +// ── Pipeline order for proving job types ───────────────────────────────────── + +const PIPELINE_ORDER = [ + "PARITY_BASE", + "PARITY_ROOT", + "PUBLIC_CHONK_VERIFIER", + "PUBLIC_VM", + "PUBLIC_TX_BASE_ROLLUP", + "TX_MERGE_ROLLUP", + "BLOCK_ROOT_ROLLUP", + "BLOCK_ROOT_FIRST_ROLLUP", + "BLOCK_ROOT_SINGLE_TX_ROLLUP", + "BLOCK_MERGE_ROLLUP", + "CHECKPOINT_ROOT_ROLLUP", + "CHECKPOINT_MERGE_ROLLUP", + "ROOT_ROLLUP", +]; + +// ── Query definitions ──────────────────────────────────────────────────────── + +async function fetchAllData() { + process.stderr.write( + `Fetching logs for ${config.pod} in ${config.namespace}\n`, + ); + process.stderr.write(`Time range: ${config.start} to ${config.end}\n\n`); + + const brokerPod = `${config.namespace}-prover-broker-0`; + + const [ + 
epochStart, + blobFields, + blobBatching, + startingBlock, + processedTxs, + addingTxs, + epochFinalized, + brokerNewJobs, + brokerCompleteJobs, + ] = await Promise.all([ + queryLogs("epoch-start", "Starting epoch.*proving job"), + queryLogs("blob-fields", "Blob fields per checkpoint"), + queryLogs("blob-batching", "Final blob batching"), + queryLogs("starting-block", "Starting block", { + module: "prover-client:orchestrator", + }), + queryLogs("processed-txs", "Processed.*successful txs"), + queryLogs("adding-txs", "Adding.*transactions to block"), + queryLogs("epoch-finalized", "Finalized proof for epoch"), + queryLogs("broker-new-jobs", "New proving job", { pod: brokerPod }), + queryLogs("broker-complete-jobs", "Proving job complete", { + pod: brokerPod, + }), + ]); + + process.stderr.write("\n"); + return { + epochStart, + blobFields, + blobBatching, + startingBlock, + processedTxs, + addingTxs, + epochFinalized, + brokerNewJobs, + brokerCompleteJobs, + }; +} + +// ── Time helpers ───────────────────────────────────────────────────────────── + +function formatDelta(ms: number): string { + const totalSeconds = Math.round(ms / 1000); + if (totalSeconds < 60) return `${totalSeconds}s`; + const minutes = Math.floor(totalSeconds / 60); + const seconds = totalSeconds % 60; + if (minutes < 60) return `${minutes}m ${seconds}s`; + const hours = Math.floor(minutes / 60); + const mins = minutes % 60; + return `${hours}h ${mins}m ${seconds}s`; +} + +function minTimestamp(entries: LogEntry[]): string | null { + if (entries.length === 0) return null; + return entries.reduce( + (min, e) => (e.timestamp < min ? e.timestamp : min), + entries[0].timestamp, + ); +} + +function maxTimestamp(entries: LogEntry[]): string | null { + if (entries.length === 0) return null; + return entries.reduce( + (max, e) => (e.timestamp > max ? 
e.timestamp : max), + entries[0].timestamp, + ); +} + +// ── Parsing helpers ────────────────────────────────────────────────────────── + +function msg(entry: LogEntry): string { + return entry.jsonPayload?.message || ""; +} + +function parseEpochStart(entries: LogEntry[]): { + epoch: number; + fromCheckpoint: number; + toCheckpoint: number; + fromBlock: number; + toBlock: number; + txCount: number; +} | null { + if (entries.length === 0) return null; + const entry = entries[0]; + const m = msg(entry); + const p = entry.jsonPayload || {}; + // Message: "Starting epoch 3 proving job with checkpoints 33 to 64" + // Structured fields: epochNumber, fromBlock, toBlock, epochSizeTxs + const epochMatch = m.match( + /Starting epoch (\d+).*checkpoints (\d+) to (\d+)/, + ); + if (!epochMatch) return null; + return { + epoch: parseInt(epochMatch[1]), + fromCheckpoint: parseInt(epochMatch[2]), + toCheckpoint: parseInt(epochMatch[3]), + fromBlock: p.fromBlock ?? 0, + toBlock: p.toBlock ?? 0, + txCount: p.epochSizeTxs ?? 0, + }; +} + +function parseBlobFields(entries: LogEntry[]): string | null { + if (entries.length === 0) return null; + const m = msg(entries[0]); + // "Blob fields per checkpoint: 211.92427600175142ms" + const match = m.match(/Blob fields per checkpoint:\s*([\d.]+)ms/); + return match ? `${parseFloat(match[1]).toFixed(2)}ms` : null; +} + +function parseBlobBatching(entries: LogEntry[]): string | null { + if (entries.length === 0) return null; + const m = msg(entries[0]); + // "Final blob batching challeneger: 3408.9118730016053ms" (note typo in source) + const match = m.match(/Final blob batching.*?:\s*([\d.]+)ms/); + return match ? `${parseFloat(match[1]).toFixed(2)}ms` : null; +} + +interface BlockInfo { + blockNumber: number; + slot: number; + txCount: number; + processingTime: number; // seconds +} + +function parseStartingBlocks( + entries: LogEntry[], +): Map { + // "Starting block 175 for slot 112." 
+ const result = new Map(); + for (const entry of entries) { + const m2 = msg(entry); + const match = m2.match(/Starting block (\d+) for slot (\d+)/); + if (match) { + const blockNumber = parseInt(match[1]); + const slot = parseInt(match[2]); + result.set(blockNumber, { blockNumber, slot }); + } + } + return result; +} + +function parseProcessedTxs( + entries: LogEntry[], +): { timestamp: string; txCount: number; duration: number }[] { + // "Processed 18 successful txs and 0 failed txs in 29.2s" + const results: { timestamp: string; txCount: number; duration: number }[] = + []; + for (const entry of entries) { + const m2 = msg(entry); + const match = m2.match(/Processed (\d+) successful txs.*?in ([\d.]+)s/); + if (match) { + results.push({ + timestamp: entry.timestamp, + txCount: parseInt(match[1]), + duration: parseFloat(match[2]), + }); + } + } + return results; +} + +function parseAddingTxs( + entries: LogEntry[], +): { timestamp: string; txCount: number; blockNumber: number }[] { + // "Adding 6 transactions to block 175" + const results: { timestamp: string; txCount: number; blockNumber: number }[] = + []; + for (const entry of entries) { + const m2 = msg(entry); + const match = m2.match(/Adding (\d+) transactions to block (\d+)/); + if (match) { + results.push({ + timestamp: entry.timestamp, + txCount: parseInt(match[1]), + blockNumber: parseInt(match[2]), + }); + } + } + return results; +} + +function parseEpochFinalized(entries: LogEntry[]): { duration: string } | null { + if (entries.length === 0) return null; + const p = entries[0].jsonPayload || {}; + // Duration is in jsonPayload.duration (milliseconds) + const durationMs = p.duration; + if (durationMs == null) return null; + const totalSeconds = durationMs / 1000; + const minutes = Math.floor(totalSeconds / 60); + const seconds = Math.round(totalSeconds % 60); + return { duration: `${minutes}m ${seconds}s` }; +} + +// ── Broker job parsing ─────────────────────────────────────────────────────── + 
+interface BrokerJobStage { + enqueuedFirst: number; + enqueuedLast: number; + completedFirst: number; + completedLast: number; + count: number; + completedCount: number; +} + +interface BrokerJobDuration { + type: string; + durationMs: number; +} + +function parseBrokerJobs( + newEntries: LogEntry[], + completeEntries: LogEntry[], +): { stages: Map; durations: BrokerJobDuration[] } { + // Index "new" entries by provingJobId for duration matching + const newById = new Map(); + + // Aggregate per-type timestamps + const stages = new Map(); + + for (const entry of newEntries) { + const m2 = msg(entry); + const typeMatch = m2.match(/id=\d+:(\w+):/); + if (!typeMatch) continue; + const type = typeMatch[1]; + const ts = new Date(entry.timestamp).getTime(); + const jobId = entry.jsonPayload?.provingJobId; + if (jobId) { + newById.set(jobId, { type, timestamp: ts }); + } + + const existing = stages.get(type); + if (existing) { + existing.enqueuedFirst = Math.min(existing.enqueuedFirst, ts); + existing.enqueuedLast = Math.max(existing.enqueuedLast, ts); + existing.count++; + } else { + stages.set(type, { + enqueuedFirst: ts, + enqueuedLast: ts, + completedFirst: Infinity, + completedLast: -Infinity, + count: 1, + completedCount: 0, + }); + } + } + + const durations: BrokerJobDuration[] = []; + + for (const entry of completeEntries) { + const m2 = msg(entry); + const typeMatch = m2.match(/type=(\w+)/); + if (!typeMatch) continue; + const type = typeMatch[1]; + const ts = new Date(entry.timestamp).getTime(); + + const existing = stages.get(type); + if (existing) { + existing.completedFirst = Math.min(existing.completedFirst, ts); + existing.completedLast = Math.max(existing.completedLast, ts); + existing.completedCount++; + } else { + stages.set(type, { + enqueuedFirst: Infinity, + enqueuedLast: -Infinity, + completedFirst: ts, + completedLast: ts, + count: 0, + completedCount: 1, + }); + } + + // Match with new entry for per-job duration + const jobId = 
entry.jsonPayload?.provingJobId; + if (jobId) { + const newEntry = newById.get(jobId); + if (newEntry) { + durations.push({ type, durationMs: ts - newEntry.timestamp }); + } + } + } + + return { stages, durations }; +} + +function computeDurationStats( + durations: BrokerJobDuration[], +): Map< + string, + { count: number; median: number; mean: number; p90: number; max: number } +> { + // Group by type + const byType = new Map(); + for (const d of durations) { + const arr = byType.get(d.type); + if (arr) { + arr.push(d.durationMs); + } else { + byType.set(d.type, [d.durationMs]); + } + } + + const stats = new Map< + string, + { count: number; median: number; mean: number; p90: number; max: number } + >(); + for (const [type, values] of byType) { + values.sort((a, b) => a - b); + const count = values.length; + const mean = values.reduce((a, b) => a + b, 0) / count; + const median = + count % 2 === 0 + ? (values[count / 2 - 1] + values[count / 2]) / 2 + : values[Math.floor(count / 2)]; + const p90Index = Math.min(Math.ceil(count * 0.9) - 1, count - 1); + const p90 = values[p90Index]; + const max = values[count - 1]; + stats.set(type, { count, median, mean, p90, max }); + } + + return stats; +} + +function sortedJobTypes(stages: Map): string[] { + const ordered: string[] = []; + const seen = new Set(); + + // First add types in pipeline order + for (const type of PIPELINE_ORDER) { + if (stages.has(type)) { + ordered.push(type); + seen.add(type); + } + } + + // Then append any remaining types sorted by first enqueue time + const remaining = [...stages.entries()] + .filter(([type]) => !seen.has(type)) + .sort((a, b) => a[1].enqueuedFirst - b[1].enqueuedFirst) + .map(([type]) => type); + + return [...ordered, ...remaining]; +} + +// ── Correlate per-block data ───────────────────────────────────────────────── + +function correlateBlocks( + processedTxs: { timestamp: string; txCount: number; duration: number }[], + addingTxs: { timestamp: string; txCount: number; 
blockNumber: number }[], + startingBlocks: Map, +): BlockInfo[] { + // "Processed" and "Adding" entries share identical timestamps. + // Sort both by timestamp and zip 1:1. Slot comes from "Starting block" entries. + const sorted_processed = [...processedTxs].sort((a, b) => + a.timestamp.localeCompare(b.timestamp), + ); + const sorted_adding = [...addingTxs].sort((a, b) => + a.timestamp.localeCompare(b.timestamp), + ); + + const blocks: BlockInfo[] = []; + + if (sorted_processed.length !== sorted_adding.length) { + process.stderr.write( + `Warning: processed (${sorted_processed.length}) and adding (${sorted_adding.length}) entry counts differ. ` + + `Correlating by position.\n`, + ); + } + + const count = Math.min(sorted_processed.length, sorted_adding.length); + for (let i = 0; i < count; i++) { + const blockNumber = sorted_adding[i].blockNumber; + const slotInfo = startingBlocks.get(blockNumber); + blocks.push({ + blockNumber, + slot: slotInfo?.slot ?? 0, + txCount: sorted_adding[i].txCount, + processingTime: sorted_processed[i].duration, + }); + } + + // Sort by block number for output + blocks.sort((a, b) => a.blockNumber - b.blockNumber); + return blocks; +} + +// ── Format output ──────────────────────────────────────────────────────────── + +function formatOutput(data: Awaited>): string { + const lines: string[] = []; + + const epochInfo = parseEpochStart(data.epochStart); + if (epochInfo) { + const checkpointCount = + epochInfo.toCheckpoint - epochInfo.fromCheckpoint + 1; + const blockCount = epochInfo.toBlock - epochInfo.fromBlock + 1; + lines.push(`Epoch ${epochInfo.epoch} stats:`); + lines.push( + ` Checkpoints: ${checkpointCount} (${epochInfo.fromCheckpoint} to ${epochInfo.toCheckpoint}), ` + + `Blocks: ${blockCount} (${epochInfo.fromBlock} to ${epochInfo.toBlock}), ` + + `Txs: ${epochInfo.txCount}`, + ); + } else { + lines.push("Epoch stats: not found"); + } + + const blobFieldsTime = parseBlobFields(data.blobFields); + if (blobFieldsTime) { + 
lines.push(` Blob fields per checkpoint: ${blobFieldsTime}`); + } + + const blobBatchingTime = parseBlobBatching(data.blobBatching); + if (blobBatchingTime) { + lines.push(` Blob batching: ${blobBatchingTime}`); + } + + // Timeline + const epochStartTs = data.epochStart[0]?.timestamp; + const blocksStartTs = minTimestamp(data.startingBlock); + const blocksEndTs = maxTimestamp(data.processedTxs); + const epochEndTs = data.epochFinalized[0]?.timestamp; + + const { stages, durations } = parseBrokerJobs( + data.brokerNewJobs, + data.brokerCompleteJobs, + ); + + if (epochStartTs) { + const t0 = new Date(epochStartTs).getTime(); + + // Build all timeline events: [timestamp_ms, label] + const events: [number, string][] = []; + if (epochStartTs) + events.push([new Date(epochStartTs).getTime(), "Epoch started proving"]); + if (blocksStartTs) + events.push([ + new Date(blocksStartTs).getTime(), + "Blocks started processing", + ]); + if (blocksEndTs) + events.push([ + new Date(blocksEndTs).getTime(), + "Blocks finished processing", + ]); + if (epochEndTs) + events.push([new Date(epochEndTs).getTime(), "Epoch finished proving"]); + + // Add per-stage proving events + for (const [type, s] of stages) { + if (s.count > 0) { + events.push([ + s.enqueuedFirst, + `${type} first enqueued (${s.count} jobs)`, + ]); + if (s.count > 1) { + events.push([s.enqueuedLast, `${type} last enqueued`]); + } + } + if (s.completedCount > 0) { + events.push([s.completedLast, `${type} last proof completed`]); + } + } + + // Sort chronologically (stable) + events.sort((a, b) => a[0] - b[0]); + + const labelWidth = Math.max(...events.map(([, label]) => label.length)); + lines.push(""); + lines.push("Timeline:"); + for (const [ts, label] of events) { + const delta = ts - t0; + lines.push(` ${label.padEnd(labelWidth)} T+${formatDelta(delta)}`); + } + } + if (stages.size > 0 && epochStartTs) { + const t0 = new Date(epochStartTs).getTime(); + const types = sortedJobTypes(stages); + const typeWidth = 
Math.max(...types.map((t) => t.length)); + const countWidth = Math.max( + ...types.map((t) => String(stages.get(t)!.count).length), + ); + + lines.push(""); + lines.push("Proving jobs by stage:"); + for (const type of types) { + const s = stages.get(type)!; + const countStr = String(s.count).padStart(countWidth); + const typeStr = type.padEnd(typeWidth); + + // Enqueue range + let enqueueStr: string; + if (s.count === 0) { + enqueueStr = "n/a"; + } else if (s.count === 1) { + enqueueStr = `enqueued T+${formatDelta(s.enqueuedFirst - t0)}`; + } else { + enqueueStr = `enqueued T+${formatDelta(s.enqueuedFirst - t0)}..T+${formatDelta(s.enqueuedLast - t0)}`; + } + + // Complete range + let completeStr: string; + if (s.completedCount === 0) { + completeStr = "not completed"; + } else if (s.completedCount === 1) { + completeStr = `completed T+${formatDelta(s.completedFirst - t0)}`; + } else { + completeStr = `completed T+${formatDelta(s.completedFirst - t0)}..T+${formatDelta(s.completedLast - t0)}`; + } + + // Wall-clock duration from first enqueue to last complete + let wallStr = ""; + if (s.count > 0 && s.completedCount > 0) { + wallStr = ` (${formatDelta(s.completedLast - s.enqueuedFirst)})`; + } + + lines.push( + ` ${typeStr} ${countStr} jobs ${enqueueStr.padEnd(35)}${completeStr}${wallStr}`, + ); + } + } + + // Per-job duration stats + if (durations.length > 0) { + const durationStats = computeDurationStats(durations); + const types = sortedJobTypes(stages).filter((t) => durationStats.has(t)); + // Add any types not in stages (shouldn't happen but be safe) + for (const t of durationStats.keys()) { + if (!types.includes(t)) types.push(t); + } + + if (types.length > 0) { + const typeWidth = Math.max(...types.map((t) => t.length), 4); + const formatS = (ms: number) => `${(ms / 1000).toFixed(1)}s`; + + lines.push(""); + lines.push("Per-job duration stats:"); + lines.push( + ` ${"Type".padEnd(typeWidth)} ${"Count".padStart(6)} ${"Median".padStart(8)} ${"Mean".padStart(8)} 
${"p90".padStart(8)} ${"Max".padStart(8)}`, + ); + for (const type of types) { + const s = durationStats.get(type)!; + lines.push( + ` ${type.padEnd(typeWidth)} ${String(s.count).padStart(6)} ${formatS(s.median).padStart(8)} ${formatS(s.mean).padStart(8)} ${formatS(s.p90).padStart(8)} ${formatS(s.max).padStart(8)}`, + ); + } + } + } + + // Per-block data + const processedTxs = parseProcessedTxs(data.processedTxs); + const addingTxs = parseAddingTxs(data.addingTxs); + const startingBlocks = parseStartingBlocks(data.startingBlock); + const blocks = correlateBlocks(processedTxs, addingTxs, startingBlocks); + + if (blocks.length > 0) { + lines.push(""); + lines.push(`Per block (sorted by block number):`); + for (const block of blocks) { + const time = block.processingTime.toFixed(1); + lines.push( + ` Block ${block.blockNumber} (slot ${block.slot}): ${block.txCount} txs, processing ${time}s`, + ); + } + } + + // Epoch proof duration + const finalized = parseEpochFinalized(data.epochFinalized); + if (finalized) { + lines.push(""); + lines.push(`Epoch proof duration: ${finalized.duration}`); + } + + return lines.join("\n"); +} + +// ── Main ───────────────────────────────────────────────────────────────────── + +async function main() { + const scanResult = await scanForEpoch(); + config.start = scanResult.start; + config.end = scanResult.end; + + const data = await fetchAllData(); + const output = formatOutput(data); + console.log(output); +} + +main().catch((err) => { + console.error(`Fatal: ${err.message}`); + process.exit(1); +}); From abcea4a7c75f5e9cdc842fbfa9252b7986d1ecb1 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 3 Mar 2026 09:42:16 +0000 Subject: [PATCH 03/37] chore: update pause script to allow resume (#21032) Updates the pause script to save its state in order for later resume. 
--- spartan/scripts/network_pause.sh | 53 ++++++++++++++++++--- spartan/scripts/network_resume.sh | 76 +++++++++++++++++++++++++++++++ 2 files changed, 123 insertions(+), 6 deletions(-) create mode 100755 spartan/scripts/network_resume.sh diff --git a/spartan/scripts/network_pause.sh b/spartan/scripts/network_pause.sh index 3d28a5389b44..706b9553b911 100755 --- a/spartan/scripts/network_pause.sh +++ b/spartan/scripts/network_pause.sh @@ -31,16 +31,55 @@ if [[ -z "$NAMESPACE" ]]; then usage fi -log "Snapshotting $NAMESPACE" -$scripts_dir/manual_snapshot.sh $NAMESPACE +CONFIGMAP_NAME="network-pause-state" -log "Waiting for snapshot upload" -sleep 60 # staging-ignition takes 28s +# Guard against double-pause (would overwrite saved state with zeros) +if kubectl get configmap "$CONFIGMAP_NAME" -n "$NAMESPACE" &>/dev/null; then + die "Namespace $NAMESPACE is already paused (ConfigMap $CONFIGMAP_NAME exists). Run network_resume.sh first." +fi + +# Snapshot if the cronjob exists (not all networks have snapshots enabled) +SNAPSHOT_CRONJOB="$NAMESPACE-snapshot-aztec-snapshots" +if kubectl get cronjob "$SNAPSHOT_CRONJOB" -n "$NAMESPACE" &>/dev/null; then + log "Snapshotting $NAMESPACE" + $scripts_dir/manual_snapshot.sh $NAMESPACE + log "Waiting for snapshot upload" + sleep 60 # staging-ignition takes 28s +else + log "Snapshot cronjob not found ($SNAPSHOT_CRONJOB), skipping snapshot" +fi + +# Collect current replica counts before scaling down +log "Collecting current replica counts" + +SS_JSON=$(kubectl get statefulset -n "$NAMESPACE" -o json | \ + jq '[.items[] | {key: .metadata.name, value: .spec.replicas}] | from_entries') +DEPLOY_JSON=$(kubectl get deployment -n "$NAMESPACE" -o json | \ + jq '[.items[] | {key: .metadata.name, value: .spec.replicas}] | from_entries') + +CRONJOB_JSON=$(kubectl get cronjob -n "$NAMESPACE" -o json | \ + jq '[.items[] | select(.spec.suspend != true) | .metadata.name]') + +STATE_JSON=$(jq -n \ + --arg paused_at "$(date -Is)" \ + --argjson 
statefulsets "$SS_JSON" \ + --argjson deployments "$DEPLOY_JSON" \ + --argjson cronjobs "$CRONJOB_JSON" \ + '{paused_at: $paused_at, statefulsets: $statefulsets, deployments: $deployments, cronjobs: $cronjobs}') + +log "Saving pause state to ConfigMap $CONFIGMAP_NAME" +kubectl create configmap "$CONFIGMAP_NAME" \ + -n "$NAMESPACE" \ + --from-literal=state="$STATE_JSON" + +# Scale everything down except eth-devnet (L1 beacon chain cannot recover from long pauses) log "Pausing namespace $NAMESPACE" for item_type in statefulset deployment; do - for item in $(kubectl get $item_type -n $NAMESPACE -o jsonpath='{.items[*].metadata.name}'); do - kubectl scale -n $NAMESPACE $item_type/$item --replicas 0 + for item in $(kubectl get "$item_type" -n "$NAMESPACE" -o json | \ + jq -r '.items[] | select(.metadata.labels["app.kubernetes.io/name"] != "eth-devnet") | .metadata.name'); do + log " Scaling $item_type/$item to 0" + kubectl scale -n "$NAMESPACE" "$item_type/$item" --replicas 0 done done @@ -48,3 +87,5 @@ log "Suspending cronjobs" for item in $(kubectl get cronjob -n $NAMESPACE -o jsonpath='{.items[*].metadata.name}'); do kubectl -n $NAMESPACE patch cronjobs $item -p '{"spec" : {"suspend" : true }}' done + +log "Namespace $NAMESPACE paused successfully. State saved to ConfigMap $CONFIGMAP_NAME." 
diff --git a/spartan/scripts/network_resume.sh b/spartan/scripts/network_resume.sh new file mode 100755 index 000000000000..6058a17bf250 --- /dev/null +++ b/spartan/scripts/network_resume.sh @@ -0,0 +1,76 @@ +#!/usr/bin/env bash + +set -euo pipefail + +spartan=$(git rev-parse --show-toplevel)/spartan +scripts_dir=$spartan/scripts + +log() { echo "[INFO] $(date -Is) - $*"; } +err() { echo "[ERROR] $(date -Is) - $*" >&2; } +die() { err "$*"; exit 1; } + +usage() { + echo "Usage: $0 [namespace]" + echo "" + echo "Arguments:" + echo " namespace - Kubernetes namespace (default: from NAMESPACE env var)" + echo "" + echo "Environment variables:" + echo " NAMESPACE - K8s namespace (required if not passed as argument)" + echo "" + exit 1 +} + +NAMESPACE="${1:-${NAMESPACE:-}}" + +if [[ "${1:-}" == "-h" || "${1:-}" == "--help" ]]; then + usage +fi + +if [[ -z "$NAMESPACE" ]]; then + usage +fi + +CONFIGMAP_NAME="network-pause-state" + +# Read saved state +log "Reading pause state from ConfigMap $CONFIGMAP_NAME" +STATE_JSON=$(kubectl get configmap "$CONFIGMAP_NAME" -n "$NAMESPACE" -o jsonpath='{.data.state}') || \ + die "ConfigMap $CONFIGMAP_NAME not found in namespace $NAMESPACE. Is the network paused?" + +echo "$STATE_JSON" | jq . 
>/dev/null 2>&1 || die "Invalid JSON in ConfigMap $CONFIGMAP_NAME" +paused_at=$(echo "$STATE_JSON" | jq -r '.paused_at') +log "Network was paused at $paused_at" + +# Restore statefulset replicas +log "Restoring statefulsets" +for name in $(echo "$STATE_JSON" | jq -r '.statefulsets | keys[]'); do + replicas=$(echo "$STATE_JSON" | jq -r --arg name "$name" '.statefulsets[$name]') + if [[ "$replicas" -gt 0 ]]; then + log " Scaling statefulset/$name to $replicas replicas" + kubectl scale -n "$NAMESPACE" statefulset/"$name" --replicas "$replicas" + fi +done + +# Restore deployment replicas +log "Restoring deployments" +for name in $(echo "$STATE_JSON" | jq -r '.deployments | keys[]'); do + replicas=$(echo "$STATE_JSON" | jq -r --arg name "$name" '.deployments[$name]') + if [[ "$replicas" -gt 0 ]]; then + log " Scaling deployment/$name to $replicas replicas" + kubectl scale -n "$NAMESPACE" deployment/"$name" --replicas "$replicas" + fi +done + +# Unsuspend only cronjobs that were active before pause +log "Unsuspending cronjobs" +for name in $(echo "$STATE_JSON" | jq -r '.cronjobs[]'); do + log " Unsuspending cronjob/$name" + kubectl -n "$NAMESPACE" patch cronjobs "$name" -p '{"spec" : {"suspend" : false }}' +done + +# Clean up +log "Cleaning up ConfigMap $CONFIGMAP_NAME" +kubectl delete configmap "$CONFIGMAP_NAME" -n "$NAMESPACE" + +log "Namespace $NAMESPACE resumed successfully." From 22a8254e4c010972dead26e9231f42c138b0caac Mon Sep 17 00:00:00 2001 From: Michal Rzeszutko Date: Tue, 3 Mar 2026 10:53:45 +0100 Subject: [PATCH 04/37] feat: price bump for RPC transaction replacement (#20806) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Adds a configurable percentage-based "price bump" requirement for RPC-submitted transactions that clash on nullifiers with existing pool transactions, or that need to evict the lowest-priority tx when the pool is full. This prevents spam via infinitesimally small fee increments. 
- When a tx arrives via RPC with nullifier conflicts, it must now pay at least X% above each conflicting tx's priority fee (default: 10%) — i.e. `>= existingFee + existingFee * bump / 100` - The same bump applies to pool-full eviction via `LowPriorityPreAddRule` - P2P gossip path is unchanged — continues using `comparePriority` (fee + hash tiebreaker) with no bump - Rejection errors now include the minimum required fee so callers know how much to bid - New env var `P2P_RPC_PRICE_BUMP_PERCENTAGE` (default: 10) controls the bump percentage ## Implementation details - `getMinimumPriceBumpFee(existingFee, priceBumpPercentage)` helper computes the threshold using integer arithmetic: `existingFee + max(existingFee * bump / 100, 1)` — the minimum bump is always at least 1 unit, so replacement always requires paying strictly more (even with 0% bump or zero existing fee) - `priceBumpPercentage` is typed as `bigint` throughout the config chain to avoid `BigInt()` conversion issues with non-integer values - `checkNullifierConflict` accepts an optional `priceBumpPercentage` param; when set, uses fee-only `>=` comparison against the bumped threshold instead of `comparePriority` - `NullifierConflictRule` now passes `context.priceBumpPercentage` through to the conflict check (previously ignored context entirely) - `LowPriorityPreAddRule` uses the bumped fee threshold when both `feeComparisonOnly` and `priceBumpPercentage` are set - Config flows: `P2P_RPC_PRICE_BUMP_PERCENTAGE` env var -> `P2PConfig` -> `TxPoolV2Config` -> `PreAddContext` (only for RPC path) - `NULLIFIER_CONFLICT` rejection error enriched with `minimumPriceBumpFee` and `txPriorityFee` fields ## Test plan - 15 new unit tests across `tx_metadata.test.ts`, `nullifier_conflict_rule.test.ts`, and `low_priority_pre_add_rule.test.ts` - Tests cover: exact threshold acceptance, below-threshold rejection, well-above threshold, 0% bump edge case, P2P path unchanged, error field population - All existing tests continue to 
pass Fixes A-452 --- .../operators/reference/changelog/v4.md | 12 ++ yarn-project/foundation/src/config/env_var.ts | 1 + yarn-project/p2p/src/client/factory.ts | 1 + yarn-project/p2p/src/config.ts | 10 ++ .../p2p/src/mem_pools/tx_pool_v2/README.md | 10 +- .../tx_pool_v2/eviction/interfaces.ts | 12 +- .../low_priority_pre_add_rule.test.ts | 66 +++++++++ .../eviction/low_priority_pre_add_rule.ts | 21 ++- .../eviction/nullifier_conflict_rule.test.ts | 104 +++++++++++++- .../eviction/nullifier_conflict_rule.ts | 3 +- .../src/mem_pools/tx_pool_v2/interfaces.ts | 3 + .../mem_pools/tx_pool_v2/tx_metadata.test.ts | 130 +++++++++++++++++- .../src/mem_pools/tx_pool_v2/tx_metadata.ts | 44 +++++- .../mem_pools/tx_pool_v2/tx_pool_v2_impl.ts | 4 +- 14 files changed, 402 insertions(+), 19 deletions(-) diff --git a/docs/docs-operate/operators/reference/changelog/v4.md b/docs/docs-operate/operators/reference/changelog/v4.md index dc8e0cce3d35..8bd0522245d0 100644 --- a/docs/docs-operate/operators/reference/changelog/v4.md +++ b/docs/docs-operate/operators/reference/changelog/v4.md @@ -161,6 +161,18 @@ Transaction submission via RPC now returns structured rejection codes when a tra **Impact**: Improved developer experience — callers can now programmatically handle specific rejection reasons. +### RPC transaction replacement price bump + +Transactions submitted via RPC that clash on nullifiers with existing pool transactions must now pay at least X% more in priority fee to replace them. The same bump applies when the pool is full and the incoming tx needs to evict the lowest-priority tx. P2P gossip behavior is unchanged. + +**Configuration:** + +```bash +P2P_RPC_PRICE_BUMP_PERCENTAGE=10 # default: 10 (percent) +``` + +Set to `0` to disable the percentage-based bump (still requires strictly higher fee). + ### Setup allow list extendable via network config The setup phase allow list can now be extended via the network configuration JSON (`txPublicSetupAllowListExtend` field). 
This allows network operators to distribute additional allowed setup functions to all nodes without requiring code changes. The local environment variable takes precedence over the network-json value. diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index c7bdfed39b44..666eb044454d 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -152,6 +152,7 @@ export type EnvVar = | 'P2P_DROP_TX_CHANCE' | 'P2P_TX_POOL_DELETE_TXS_AFTER_REORG' | 'P2P_MIN_TX_POOL_AGE_MS' + | 'P2P_RPC_PRICE_BUMP_PERCENTAGE' | 'DEBUG_P2P_INSTRUMENT_MESSAGES' | 'PEER_ID_PRIVATE_KEY' | 'PEER_ID_PRIVATE_KEY_PATH' diff --git a/yarn-project/p2p/src/client/factory.ts b/yarn-project/p2p/src/client/factory.ts index e8de6a8055a7..0895c1b624cc 100644 --- a/yarn-project/p2p/src/client/factory.ts +++ b/yarn-project/p2p/src/client/factory.ts @@ -100,6 +100,7 @@ export async function createP2PClient( archivedTxLimit: config.archivedTxLimit, minTxPoolAgeMs: config.minTxPoolAgeMs, dropTransactionsProbability: config.dropTransactionsProbability, + priceBumpPercentage: config.priceBumpPercentage, }, dateProvider, ); diff --git a/yarn-project/p2p/src/config.ts b/yarn-project/p2p/src/config.ts index 050f2b8bb233..5189cab7adb9 100644 --- a/yarn-project/p2p/src/config.ts +++ b/yarn-project/p2p/src/config.ts @@ -1,6 +1,7 @@ import { type ConfigMappingsType, SecretValue, + bigintConfigHelper, booleanConfigHelper, getConfigFromMappings, getDefaultConfig, @@ -190,6 +191,9 @@ export interface P2PConfig /** Minimum age (ms) a transaction must have been in the pool before it's eligible for block building. */ minTxPoolAgeMs: number; + + /** Minimum percentage fee increase required to replace an existing tx via RPC (0 = no bump). 
*/ + priceBumpPercentage: bigint; } export const DEFAULT_P2P_PORT = 40400; @@ -465,6 +469,12 @@ export const p2pConfigMappings: ConfigMappingsType = { description: 'Minimum age (ms) a transaction must have been in the pool before it is eligible for block building.', ...numberConfigHelper(2_000), }, + priceBumpPercentage: { + env: 'P2P_RPC_PRICE_BUMP_PERCENTAGE', + description: + 'Minimum percentage fee increase required to replace an existing tx via RPC. Even at 0%, replacement still requires paying at least 1 unit more.', + ...bigintConfigHelper(10n), + }, ...sharedSequencerConfigMappings, ...p2pReqRespConfigMappings, ...batchTxRequesterConfigMappings, diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/README.md b/yarn-project/p2p/src/mem_pools/tx_pool_v2/README.md index 25dfc1d12435..64cf64650ba3 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/README.md +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/README.md @@ -158,7 +158,7 @@ Checked before adding a transaction to the pending pool: | Rule | Purpose | |------|---------| -| `NullifierConflictRule` | Handles transactions with conflicting nullifiers. Higher priority tx wins. | +| `NullifierConflictRule` | Handles transactions with conflicting nullifiers. Higher priority tx wins. For RPC submissions, a configurable price bump percentage is required. | | `FeePayerBalancePreAddRule` | Ensures fee payer has sufficient balance for all their pending txs. | | `LowPriorityPreAddRule` | Rejects txs when pool is full and new tx has lowest priority. | @@ -233,6 +233,14 @@ await pool.updateConfig({ }); ``` +### Price Bump (RPC Transaction Replacement) + +When a transaction is submitted via RPC and clashes on nullifiers with an existing pool transaction, the incoming tx must pay at least `priceBumpPercentage`% more in priority fee (i.e. `>= existingFee + existingFee * bump / 100`) to replace it. This prevents spam via small fee increments. 
The same bump applies when the pool is full and the incoming tx needs to evict the lowest-priority tx. + +- **Env var**: `P2P_RPC_PRICE_BUMP_PERCENTAGE` (default: 10) +- **Scope**: RPC submissions only. P2P gossip uses `comparePriority` (fee + hash tiebreaker) with no bump. +- Even with a 0% bump, a replacement tx must pay at least 1 unit more than the existing fee. + ## Return Values ### AddTxsResult diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts index 32135758973d..dd488bb1597e 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/interfaces.ts @@ -100,7 +100,15 @@ export type TxPoolRejectionError = availableBalance: bigint; feeLimit: bigint; } - | { code: typeof TxPoolRejectionCode.NULLIFIER_CONFLICT; message: string; conflictingTxHash: string } + | { + code: typeof TxPoolRejectionCode.NULLIFIER_CONFLICT; + message: string; + conflictingTxHash: string; + /** Minimum fee needed to replace the conflicting tx (only set when price bump applies). */ + minimumPriceBumpFee?: bigint; + /** Incoming tx's priority fee. */ + txPriorityFee?: bigint; + } | { code: typeof TxPoolRejectionCode.INTERNAL_ERROR; message: string }; /** @@ -121,6 +129,8 @@ export interface PreAddResult { export interface PreAddContext { /** If true, compare priority fee only (no tx hash tiebreaker). Used for RPC submissions. */ feeComparisonOnly?: boolean; + /** Percentage-based price bump required for tx replacement. Only set for RPC submissions. 
*/ + priceBumpPercentage?: bigint; } /** diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts index fd66a0df4aee..57df8e341e49 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.test.ts @@ -209,5 +209,71 @@ describe('LowPriorityPreAddRule', () => { expect(result2.shouldIgnore).toBe(true); }); }); + + describe('with priceBumpPercentage', () => { + it('evicts when incoming fee exceeds the bump threshold', async () => { + const lowestPriorityMeta = createMeta('0x2222', 100n); + const poolAccess = createPoolAccess(100, lowestPriorityMeta); + const incomingMeta = createMeta('0x1111', 111n); // Above 10% bump + + const context: PreAddContext = { feeComparisonOnly: true, priceBumpPercentage: 10n }; + const result = await rule.check(incomingMeta, poolAccess, context); + + expect(result.shouldIgnore).toBe(false); + expect(result.txHashesToEvict).toContain(lowestPriorityMeta.txHash); + }); + + it('evicts when incoming fee is exactly at the bump threshold', async () => { + const lowestPriorityMeta = createMeta('0x2222', 100n); + const poolAccess = createPoolAccess(100, lowestPriorityMeta); + const incomingMeta = createMeta('0x1111', 110n); // Exactly 10% bump — accepted + + const context: PreAddContext = { feeComparisonOnly: true, priceBumpPercentage: 10n }; + const result = await rule.check(incomingMeta, poolAccess, context); + + expect(result.shouldIgnore).toBe(false); + expect(result.txHashesToEvict).toContain(lowestPriorityMeta.txHash); + }); + + it('ignores when incoming fee is below the bump threshold', async () => { + const lowestPriorityMeta = createMeta('0x2222', 100n); + const poolAccess = createPoolAccess(100, lowestPriorityMeta); + const incomingMeta = createMeta('0x1111', 109n); // Below 10% bump + + const 
context: PreAddContext = { feeComparisonOnly: true, priceBumpPercentage: 10n }; + const result = await rule.check(incomingMeta, poolAccess, context); + + expect(result.shouldIgnore).toBe(true); + expect(result.reason?.code).toBe(TxPoolRejectionCode.LOW_PRIORITY_FEE); + if (result.reason?.code === TxPoolRejectionCode.LOW_PRIORITY_FEE) { + expect(result.reason.minimumPriorityFee).toBe(110n); + expect(result.reason.txPriorityFee).toBe(109n); + } + }); + + it('without price bump (P2P path), behavior unchanged', async () => { + const lowestPriorityMeta = createMeta('0x2222', 100n); + const poolAccess = createPoolAccess(100, lowestPriorityMeta); + const incomingMeta = createMeta('0x1111', 101n); + + // No context — uses comparePriority, 101 > 100 so incoming wins + const result = await rule.check(incomingMeta, poolAccess); + + expect(result.shouldIgnore).toBe(false); + expect(result.txHashesToEvict).toContain(lowestPriorityMeta.txHash); + }); + + it('with 0% bump, rejects equal fee (minimum bump of 1)', async () => { + const lowestPriorityMeta = createMeta('0x2222', 100n); + const poolAccess = createPoolAccess(100, lowestPriorityMeta); + const incomingMeta = createMeta('0x1111', 100n); + + const context: PreAddContext = { feeComparisonOnly: true, priceBumpPercentage: 0n }; + const result = await rule.check(incomingMeta, poolAccess, context); + + expect(result.shouldIgnore).toBe(true); + expect(result.txHashesToEvict).toHaveLength(0); + }); + }); }); }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts index b4d5ef8382db..013ffe6f8c6e 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts @@ -1,6 +1,6 @@ import { createLogger } from '@aztec/foundation/log'; -import { type TxMetaData, comparePriority } from '../tx_metadata.js'; 
+import { type TxMetaData, comparePriority, getMinimumPriceBumpFee } from '../tx_metadata.js'; import { type EvictionConfig, type PreAddContext, @@ -48,10 +48,14 @@ export class LowPriorityPreAddRule implements PreAddRule { } // Compare incoming tx against lowest priority tx. - // feeOnly mode (RPC): use strict fee comparison only — avoids churn from hash ordering - // Default (gossip): use full comparePriority (fee + tx hash tiebreaker) for determinism + // feeOnly mode (RPC): use strict fee comparison only — avoids churn from hash ordering. + // When price bump is also set, require the bumped fee threshold. + // Default (gossip): use full comparePriority (fee + tx hash tiebreaker) for determinism. const isHigherPriority = context?.feeComparisonOnly - ? incomingMeta.priorityFee > lowestPriorityMeta.priorityFee + ? context.priceBumpPercentage !== undefined + ? incomingMeta.priorityFee >= + getMinimumPriceBumpFee(lowestPriorityMeta.priorityFee, context.priceBumpPercentage) + : incomingMeta.priorityFee > lowestPriorityMeta.priorityFee : comparePriority(incomingMeta, lowestPriorityMeta) > 0; if (isHigherPriority) { @@ -66,6 +70,11 @@ export class LowPriorityPreAddRule implements PreAddRule { } // Incoming tx has equal or lower priority - ignore it (it would be evicted anyway) + const minimumFee = + context?.feeComparisonOnly && context.priceBumpPercentage !== undefined + ? getMinimumPriceBumpFee(lowestPriorityMeta.priorityFee, context.priceBumpPercentage) + : lowestPriorityMeta.priorityFee + 1n; + this.log.debug( `Pool at capacity (${currentCount}/${this.maxPoolSize}), ignoring ${incomingMeta.txHash} ` + `(priority ${incomingMeta.priorityFee}) - lower than existing minimum (priority ${lowestPriorityMeta.priorityFee})`, @@ -75,8 +84,8 @@ export class LowPriorityPreAddRule implements PreAddRule { txHashesToEvict: [], reason: { code: TxPoolRejectionCode.LOW_PRIORITY_FEE, - message: `Tx does not meet minimum priority fee. 
Required: ${lowestPriorityMeta.priorityFee + 1n}, got: ${incomingMeta.priorityFee}`, - minimumPriorityFee: lowestPriorityMeta.priorityFee + 1n, + message: `Tx does not meet minimum priority fee. Required: ${minimumFee}, got: ${incomingMeta.priorityFee}`, + minimumPriorityFee: minimumFee, txPriorityFee: incomingMeta.priorityFee, }, }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.test.ts index 5108966f9047..f30ba1387587 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.test.ts @@ -1,5 +1,5 @@ import { type TxMetaData, stubTxMetaData } from '../tx_metadata.js'; -import type { PreAddPoolAccess } from './interfaces.js'; +import { type PreAddContext, type PreAddPoolAccess, TxPoolRejectionCode } from './interfaces.js'; import { NullifierConflictRule } from './nullifier_conflict_rule.js'; describe('NullifierConflictRule', () => { @@ -255,6 +255,108 @@ describe('NullifierConflictRule', () => { }); }); + describe('with priceBumpPercentage context', () => { + it('accepts tx when fee exceeds 10% bump threshold', async () => { + const sharedNullifier = '0xshared_null'; + const existingMeta = createMeta('0x2222', 100n, [sharedNullifier]); + const incomingMeta = createMeta('0x1111', 111n, [sharedNullifier]); // Above 10% + + const metadataMap = new Map([['0x2222', existingMeta]]); + const nullifierMap = new Map([[sharedNullifier, '0x2222']]); + poolAccess = createPoolAccess(nullifierMap, metadataMap); + + const context: PreAddContext = { feeComparisonOnly: true, priceBumpPercentage: 10n }; + const result = await rule.check(incomingMeta, poolAccess, context); + + expect(result.shouldIgnore).toBe(false); + expect(result.txHashesToEvict).toContain('0x2222'); + }); + + it('accepts tx when fee is exactly at 10% bump threshold', async 
() => { + const sharedNullifier = '0xshared_null'; + const existingMeta = createMeta('0x2222', 100n, [sharedNullifier]); + const incomingMeta = createMeta('0x1111', 110n, [sharedNullifier]); // Exactly 10% — accepted + + const metadataMap = new Map([['0x2222', existingMeta]]); + const nullifierMap = new Map([[sharedNullifier, '0x2222']]); + poolAccess = createPoolAccess(nullifierMap, metadataMap); + + const context: PreAddContext = { feeComparisonOnly: true, priceBumpPercentage: 10n }; + const result = await rule.check(incomingMeta, poolAccess, context); + + expect(result.shouldIgnore).toBe(false); + expect(result.txHashesToEvict).toContain('0x2222'); + }); + + it('rejects tx when fee is below 10% bump threshold', async () => { + const sharedNullifier = '0xshared_null'; + const existingMeta = createMeta('0x2222', 100n, [sharedNullifier]); + const incomingMeta = createMeta('0x1111', 109n, [sharedNullifier]); // Below 10% + + const metadataMap = new Map([['0x2222', existingMeta]]); + const nullifierMap = new Map([[sharedNullifier, '0x2222']]); + poolAccess = createPoolAccess(nullifierMap, metadataMap); + + const context: PreAddContext = { feeComparisonOnly: true, priceBumpPercentage: 10n }; + const result = await rule.check(incomingMeta, poolAccess, context); + + expect(result.shouldIgnore).toBe(true); + expect(result.reason?.code).toBe(TxPoolRejectionCode.NULLIFIER_CONFLICT); + if (result.reason?.code === TxPoolRejectionCode.NULLIFIER_CONFLICT) { + expect(result.reason.minimumPriceBumpFee).toBe(110n); + expect(result.reason.txPriorityFee).toBe(109n); + } + }); + + it('accepts tx well above bump threshold', async () => { + const sharedNullifier = '0xshared_null'; + const existingMeta = createMeta('0x2222', 100n, [sharedNullifier]); + const incomingMeta = createMeta('0x1111', 200n, [sharedNullifier]); + + const metadataMap = new Map([['0x2222', existingMeta]]); + const nullifierMap = new Map([[sharedNullifier, '0x2222']]); + poolAccess = createPoolAccess(nullifierMap, 
metadataMap); + + const context: PreAddContext = { feeComparisonOnly: true, priceBumpPercentage: 10n }; + const result = await rule.check(incomingMeta, poolAccess, context); + + expect(result.shouldIgnore).toBe(false); + expect(result.txHashesToEvict).toContain('0x2222'); + }); + + it('without price bump (P2P path), behavior is unchanged', async () => { + const sharedNullifier = '0xshared_null'; + const existingMeta = createMeta('0x2222', 100n, [sharedNullifier]); + const incomingMeta = createMeta('0x1111', 101n, [sharedNullifier]); // 1% above, not enough for 10% bump + + const metadataMap = new Map([['0x2222', existingMeta]]); + const nullifierMap = new Map([[sharedNullifier, '0x2222']]); + poolAccess = createPoolAccess(nullifierMap, metadataMap); + + // No context (P2P) — uses comparePriority, 101 > 100 means incoming wins + const result = await rule.check(incomingMeta, poolAccess); + + expect(result.shouldIgnore).toBe(false); + expect(result.txHashesToEvict).toContain('0x2222'); + }); + + it('with 0% price bump, rejects equal fee (minimum bump of 1)', async () => { + const sharedNullifier = '0xshared_null'; + const existingMeta = createMeta('0x2222', 100n, [sharedNullifier]); + const incomingMeta = createMeta('0x1111', 100n, [sharedNullifier]); + + const metadataMap = new Map([['0x2222', existingMeta]]); + const nullifierMap = new Map([[sharedNullifier, '0x2222']]); + poolAccess = createPoolAccess(nullifierMap, metadataMap); + + const context: PreAddContext = { feeComparisonOnly: true, priceBumpPercentage: 0n }; + const result = await rule.check(incomingMeta, poolAccess, context); + + expect(result.shouldIgnore).toBe(true); + expect(result.txHashesToEvict).toHaveLength(0); + }); + }); + describe('edge cases', () => { it('skips self-reference (incoming tx hash in conflict list)', async () => { const nullifier = '0xnull1'; diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts 
b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts index 9b638e13e83d..534a6fa4526e 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts @@ -15,11 +15,12 @@ export class NullifierConflictRule implements PreAddRule { private log = createLogger('p2p:tx_pool_v2:nullifier_conflict_rule'); - check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess, _context?: PreAddContext): Promise { + check(incomingMeta: TxMetaData, poolAccess: PreAddPoolAccess, context?: PreAddContext): Promise { const result = checkNullifierConflict( incomingMeta, nullifier => poolAccess.getTxHashByNullifier(nullifier), txHash => poolAccess.getMetadata(txHash), + context?.priceBumpPercentage, ); if (result.shouldIgnore) { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts index a1a1ed0d9d69..a78a70482024 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/interfaces.ts @@ -46,6 +46,8 @@ export type TxPoolV2Config = { evictedTxCacheSize: number; /** The probability (0-1) that a transaction is discarded. 0 disables dropping. For testing purposes only. */ dropTransactionsProbability: number; + /** Minimum percentage fee increase required to replace an existing tx via RPC (0 = no bump). 
*/ + priceBumpPercentage: bigint; }; /** @@ -57,6 +59,7 @@ export const DEFAULT_TX_POOL_V2_CONFIG: TxPoolV2Config = { minTxPoolAgeMs: 2_000, evictedTxCacheSize: 10_000, dropTransactionsProbability: 0, + priceBumpPercentage: 10n, }; /** diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.test.ts index d139138d5489..a4ba74f53105 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.test.ts @@ -1,7 +1,13 @@ import { mockTx } from '@aztec/stdlib/testing'; import { TxPoolRejectionCode } from './eviction/interfaces.js'; -import { buildTxMetaData, checkNullifierConflict, comparePriority, stubTxMetaData } from './tx_metadata.js'; +import { + buildTxMetaData, + checkNullifierConflict, + comparePriority, + getMinimumPriceBumpFee, + stubTxMetaData, +} from './tx_metadata.js'; describe('TxMetaData', () => { describe('buildTxMetaData', () => { @@ -260,5 +266,127 @@ describe('TxMetaData', () => { expect(result.shouldIgnore).toBe(false); expect(result.txHashesToEvict).toEqual([]); }); + + describe('with priceBumpPercentage', () => { + it('accepts incoming tx when fee exceeds the bump threshold', () => { + const existing = makeMeta('0x2222', 100n, ['0xnull1']); + const incoming = makeMeta('0x1111', 111n, ['0xnull1']); // Above 10% bump + + const result = checkNullifierConflict( + incoming, + () => existing.txHash, + () => existing, + 10n, // 10% bump + ); + + expect(result.shouldIgnore).toBe(false); + expect(result.txHashesToEvict).toEqual([existing.txHash]); + }); + + it('accepts incoming tx when fee is exactly at the bump threshold', () => { + const existing = makeMeta('0x2222', 100n, ['0xnull1']); + const incoming = makeMeta('0x1111', 110n, ['0xnull1']); // Exactly 10% bump — accepted + + const result = checkNullifierConflict( + incoming, + () => existing.txHash, + () => existing, + 10n, + ); + + 
expect(result.shouldIgnore).toBe(false); + expect(result.txHashesToEvict).toEqual([existing.txHash]); + }); + + it('rejects incoming tx when fee is below the bump threshold', () => { + const existing = makeMeta('0x2222', 100n, ['0xnull1']); + const incoming = makeMeta('0x1111', 109n, ['0xnull1']); // Below 10% bump + + const result = checkNullifierConflict( + incoming, + () => existing.txHash, + () => existing, + 10n, + ); + + expect(result.shouldIgnore).toBe(true); + expect(result.txHashesToEvict).toEqual([]); + expect(result.reason?.code).toBe(TxPoolRejectionCode.NULLIFIER_CONFLICT); + if (result.reason?.code === TxPoolRejectionCode.NULLIFIER_CONFLICT) { + expect(result.reason.minimumPriceBumpFee).toBe(110n); + expect(result.reason.txPriorityFee).toBe(109n); + } + }); + + it('accepts incoming tx well above the bump threshold', () => { + const existing = makeMeta('0x2222', 100n, ['0xnull1']); + const incoming = makeMeta('0x1111', 200n, ['0xnull1']); + + const result = checkNullifierConflict( + incoming, + () => existing.txHash, + () => existing, + 10n, + ); + + expect(result.shouldIgnore).toBe(false); + expect(result.txHashesToEvict).toEqual([existing.txHash]); + }); + + it('with 0% bump, rejects equal fee (minimum bump of 1)', () => { + const existing = makeMeta('0x2222', 100n, ['0xnull1']); + const incoming = makeMeta('0x1111', 100n, ['0xnull1']); + + const result = checkNullifierConflict( + incoming, + () => existing.txHash, + () => existing, + 0n, // 0% bump + ); + + expect(result.shouldIgnore).toBe(true); + expect(result.txHashesToEvict).toEqual([]); + }); + + it('without price bump, uses comparePriority (P2P path unchanged)', () => { + const existing = makeMeta('0x2222', 100n, ['0xnull1']); + const incoming = makeMeta('0x1111', 100n, ['0xnull1']); + + // No priceBumpPercentage — uses comparePriority, which for equal fees uses hash tiebreaker + const result = checkNullifierConflict( + incoming, + () => existing.txHash, + () => existing, + ); + + // With equal 
fees, the result depends on hash tiebreaker + // 0x1111 < 0x2222 so incoming has lower priority → should be ignored + expect(result.shouldIgnore).toBe(true); + }); + }); + }); + + describe('getMinimumPriceBumpFee', () => { + it('calculates 10% bump correctly', () => { + expect(getMinimumPriceBumpFee(100n, 10n)).toBe(110n); + }); + + it('calculates 0% bump (returns fee + 1 minimum bump)', () => { + expect(getMinimumPriceBumpFee(100n, 0n)).toBe(101n); + }); + + it('handles 0 existing fee (minimum bump of 1)', () => { + expect(getMinimumPriceBumpFee(0n, 10n)).toBe(1n); + }); + + it('handles large percentages', () => { + expect(getMinimumPriceBumpFee(100n, 100n)).toBe(200n); + expect(getMinimumPriceBumpFee(100n, 200n)).toBe(300n); + }); + + it('truncates fractional result (integer division)', () => { + // 33 * 10 / 100 = 3.3 → truncated to 3, so 33 + 3 = 36 + expect(getMinimumPriceBumpFee(33n, 10n)).toBe(36n); + }); }); }); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts index 316f551bcc6c..3874a7aab292 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts @@ -190,21 +190,38 @@ export function comparePriority(a: PriorityComparable, b: PriorityComparable): n return compareTxHash(a.txHashBigInt, b.txHashBigInt); } +/** + * Returns the minimum fee required to replace an existing tx with the given price bump percentage. + * Uses integer arithmetic: `existingFee + existingFee * priceBumpPercentage / 100`. + */ +export function getMinimumPriceBumpFee(existingFee: bigint, priceBumpPercentage: bigint): bigint { + const bump = (existingFee * priceBumpPercentage) / 100n; + // Ensure the minimum bump is at least 1, so that replacement always requires + // paying strictly more — even with 0% bump or zero existing fee. + const effectiveBump = bump > 0n ? 
bump : 1n; + return existingFee + effectiveBump; +} + /** * Checks for nullifier conflicts between an incoming transaction and existing pool state. * * When the incoming tx shares nullifiers with existing pending txs: - * - If the incoming tx has strictly higher priority, mark conflicting txs for eviction - * - If any conflicting tx has equal or higher priority, ignore the incoming tx + * - If the incoming tx meets or exceeds the required priority, mark conflicting txs for eviction + * - Otherwise, ignore the incoming tx + * + * When `priceBumpPercentage` is provided (RPC path), uses fee-only comparison with the + * percentage bump instead of `comparePriority`. * * @param incomingMeta - Metadata for the incoming transaction * @param getTxHashByNullifier - Accessor to find which tx uses a nullifier * @param getMetadata - Accessor to get metadata for a tx hash + * @param priceBumpPercentage - Optional percentage bump required for fee-based replacement */ export function checkNullifierConflict( incomingMeta: TxMetaData, getTxHashByNullifier: (nullifier: string) => string | undefined, getMetadata: (txHash: string) => TxMetaData | undefined, + priceBumpPercentage?: bigint, ): PreAddResult { const txHashesToEvict: string[] = []; @@ -225,19 +242,32 @@ export function checkNullifierConflict( continue; } - // If incoming tx has strictly higher priority, mark for eviction - // Otherwise, ignore incoming tx (ties go to existing tx) - // Use comparePriority for deterministic ordering (includes txHash as tiebreaker) - if (comparePriority(incomingMeta, conflictingMeta) > 0) { + // When price bump is set (RPC path), require the incoming fee to meet the bumped threshold. + // Otherwise (P2P path), use full comparePriority with tx hash tiebreaker. + const isHigherPriority = + priceBumpPercentage !== undefined + ? 
incomingMeta.priorityFee >= getMinimumPriceBumpFee(conflictingMeta.priorityFee, priceBumpPercentage) + : comparePriority(incomingMeta, conflictingMeta) > 0; + + if (isHigherPriority) { txHashesToEvict.push(conflictingHashStr); } else { + const minimumFee = + priceBumpPercentage !== undefined + ? getMinimumPriceBumpFee(conflictingMeta.priorityFee, priceBumpPercentage) + : undefined; return { shouldIgnore: true, txHashesToEvict: [], reason: { code: TxPoolRejectionCode.NULLIFIER_CONFLICT, - message: `Nullifier conflict with existing tx ${conflictingHashStr}`, + message: + minimumFee !== undefined + ? `Nullifier conflict with existing tx ${conflictingHashStr}. Minimum required fee: ${minimumFee}, got: ${incomingMeta.priorityFee}` + : `Nullifier conflict with existing tx ${conflictingHashStr}`, conflictingTxHash: conflictingHashStr, + minimumPriceBumpFee: minimumFee, + txPriorityFee: minimumFee !== undefined ? incomingMeta.priorityFee : undefined, }, }; } diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts index 15f6eb4b051e..53e88e0e806e 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts @@ -214,7 +214,9 @@ export class TxPoolV2Impl { // in-memory reads, and buffered DB writes. Nothing here can throw an unhandled exception. const poolAccess = this.#createPreAddPoolAccess(); const preAddContext: PreAddContext | undefined = - opts.feeComparisonOnly !== undefined ? { feeComparisonOnly: opts.feeComparisonOnly } : undefined; + opts.feeComparisonOnly !== undefined + ? 
{ feeComparisonOnly: opts.feeComparisonOnly, priceBumpPercentage: this.#config.priceBumpPercentage } : undefined; await this.#store.transactionAsync(async () => { for (const tx of txs) { From 54a5ecd3e0e86f7d3a8d6c72d4c378481a5b5eaf Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 3 Mar 2026 09:56:37 +0000 Subject: [PATCH 05/37] refactor: remove update checker, retain version checks (#20898) This PR replaces the update checker with a simpler version checker. It also enables network_config.json to specify a latest node version which the node will check against. The node won't restart automatically if the rollup or node version changes but it will print a warning to logs every 10 minutes to inform the operator to restart. Fix A-193 --- spartan/environments/network-defaults.yml | 13 +- yarn-project/aztec/src/bin/index.ts | 4 +- .../aztec/src/cli/aztec_start_action.ts | 8 +- .../aztec/src/cli/aztec_start_options.ts | 3 +- yarn-project/aztec/src/cli/cmds/start_node.ts | 30 ++- .../aztec/src/cli/cmds/start_prover_broker.ts | 15 +- yarn-project/aztec/src/cli/release_version.ts | 21 -- yarn-project/aztec/src/cli/util.ts | 115 +++------ .../cli/src/config/cached_fetch.test.ts | 243 ++++++++++++++++++ yarn-project/cli/src/config/cached_fetch.ts | 150 ++++++++--- yarn-project/cli/src/config/network_config.ts | 2 - yarn-project/foundation/src/config/env_var.ts | 3 +- .../foundation/src/config/network_config.ts | 1 + yarn-project/node-lib/src/config/index.ts | 24 +- .../stdlib/src/update-checker/index.ts | 3 +- .../src/update-checker/package_version.ts | 17 ++ .../src/update-checker/update-checker.test.ts | 194 -------------- .../src/update-checker/update-checker.ts | 166 ------------ .../update-checker/version_checker.test.ts | 80 ++++++ .../src/update-checker/version_checker.ts | 65 +++++ 20 files changed, 609 insertions(+), 548 deletions(-) delete mode 100644 yarn-project/aztec/src/cli/release_version.ts create mode 100644 
yarn-project/cli/src/config/cached_fetch.test.ts create mode 100644 yarn-project/stdlib/src/update-checker/package_version.ts delete mode 100644 yarn-project/stdlib/src/update-checker/update-checker.test.ts delete mode 100644 yarn-project/stdlib/src/update-checker/update-checker.ts create mode 100644 yarn-project/stdlib/src/update-checker/version_checker.test.ts create mode 100644 yarn-project/stdlib/src/update-checker/version_checker.ts diff --git a/spartan/environments/network-defaults.yml b/spartan/environments/network-defaults.yml index d6763682e6ed..379c79c7c26e 100644 --- a/spartan/environments/network-defaults.yml +++ b/spartan/environments/network-defaults.yml @@ -232,9 +232,6 @@ networks: # P2P P2P_MAX_PENDING_TX_COUNT: 1000 P2P_TX_POOL_DELETE_TXS_AFTER_REORG: false - # Auto-update - AUTO_UPDATE: none - AUTO_UPDATE_URL: "" # Telemetry PUBLIC_OTEL_OPT_OUT: true PUBLIC_OTEL_EXPORTER_OTLP_METRICS_ENDPOINT: "" @@ -252,6 +249,7 @@ networks: SLASH_UNKNOWN_PENALTY: 10e18 SLASH_INVALID_BLOCK_PENALTY: 10e18 SLASH_GRACE_PERIOD_L2_SLOTS: 0 + ENABLE_VERSION_CHECK: true testnet: <<: *prodlike @@ -296,6 +294,7 @@ networks: SLASH_UNKNOWN_PENALTY: 10e18 SLASH_INVALID_BLOCK_PENALTY: 10e18 SLASH_GRACE_PERIOD_L2_SLOTS: 64 + ENABLE_VERSION_CHECK: true mainnet: <<: *prodlike @@ -338,12 +337,10 @@ networks: # P2P P2P_MAX_PENDING_TX_COUNT: 0 P2P_TX_POOL_DELETE_TXS_AFTER_REORG: true - # Auto-update - AUTO_UPDATE: notify - AUTO_UPDATE_URL: "https://storage.googleapis.com/aztec-mainnet/auto-update/mainnet.json" # Telemetry - PUBLIC_OTEL_EXPORTER_OTLP_METRICS_ENDPOINT: "https://telemetry.alpha-testnet.aztec-labs.com/v1/metrics" - PUBLIC_OTEL_COLLECT_FROM: sequencer + PUBLIC_OTEL_EXPORTER_OTLP_METRICS_ENDPOINT: "" + PUBLIC_OTEL_COLLECT_FROM: "" + ENABLE_VERSION_CHECK: false # Slasher penalties - more lenient initially SLASH_PRUNE_PENALTY: 0 SLASH_DATA_WITHHOLDING_PENALTY: 0 diff --git a/yarn-project/aztec/src/bin/index.ts b/yarn-project/aztec/src/bin/index.ts index 
c1565d92576f..55d55831457d 100644 --- a/yarn-project/aztec/src/bin/index.ts +++ b/yarn-project/aztec/src/bin/index.ts @@ -11,6 +11,7 @@ import { injectCommands as injectMiscCommands } from '@aztec/cli/misc'; import { injectCommands as injectValidatorKeysCommands } from '@aztec/cli/validator_keys'; import { getActiveNetworkName } from '@aztec/foundation/config'; import { createConsoleLogger, createLogger } from '@aztec/foundation/log'; +import { getPackageVersion } from '@aztec/stdlib/update-checker'; import { Command } from 'commander'; @@ -18,7 +19,6 @@ import { injectCompileCommand } from '../cli/cmds/compile.js'; import { injectMigrateCommand } from '../cli/cmds/migrate_ha_db.js'; import { injectProfileCommand } from '../cli/cmds/profile.js'; import { injectAztecCommands } from '../cli/index.js'; -import { getCliVersion } from '../cli/release_version.js'; const NETWORK_FLAG = 'network'; @@ -47,7 +47,7 @@ async function main() { await enrichEnvironmentWithNetworkConfig(networkName); enrichEnvironmentWithChainName(networkName); - const cliVersion = getCliVersion(); + const cliVersion = getPackageVersion() ?? 
'unknown'; let program = new Command('aztec'); program.description('Aztec command line interface').version(cliVersion).enablePositionalOptions(); program = injectAztecCommands(program, userLog, debugLogger); diff --git a/yarn-project/aztec/src/cli/aztec_start_action.ts b/yarn-project/aztec/src/cli/aztec_start_action.ts index 3b966865084e..4304d7160755 100644 --- a/yarn-project/aztec/src/cli/aztec_start_action.ts +++ b/yarn-project/aztec/src/cli/aztec_start_action.ts @@ -1,3 +1,4 @@ +import { getActiveNetworkName } from '@aztec/foundation/config'; import { type NamespacedApiHandlers, createNamespacedSafeJsonRpcServer, @@ -7,13 +8,13 @@ import { import type { LogFn, Logger } from '@aztec/foundation/log'; import type { ChainConfig } from '@aztec/stdlib/config'; import { AztecNodeApiSchema } from '@aztec/stdlib/interfaces/client'; +import { getPackageVersion } from '@aztec/stdlib/update-checker'; import { getVersioningMiddleware } from '@aztec/stdlib/versioning'; import { getOtelJsonRpcPropagationMiddleware } from '@aztec/telemetry-client'; import { createLocalNetwork } from '../local-network/index.js'; import { github, splash } from '../splash.js'; import { resolveAdminApiKey } from './admin_api_key_store.js'; -import { getCliVersion } from './release_version.js'; import { extractNamespacedOptions, installSignalHandlers } from './util.js'; import { getVersions } from './versioning.js'; @@ -25,7 +26,7 @@ export async function aztecStart(options: any, userLog: LogFn, debugLogger: Logg let config: ChainConfig | undefined = undefined; if (options.localNetwork) { - const cliVersion = getCliVersion(); + const cliVersion = getPackageVersion() ?? 
'unknown'; const localNetwork = extractNamespacedOptions(options, 'local-network'); localNetwork.testAccounts = true; userLog(`${splash}\n${github}\n\n`); @@ -57,7 +58,8 @@ export async function aztecStart(options: any, userLog: LogFn, debugLogger: Logg if (options.node) { const { startNode } = await import('./cmds/start_node.js'); - ({ config } = await startNode(options, signalHandlers, services, adminServices, userLog)); + const networkName = getActiveNetworkName(options.network); + ({ config } = await startNode(options, signalHandlers, services, adminServices, userLog, networkName)); } else if (options.bot) { const { startBot } = await import('./cmds/start_bot.js'); await startBot(options, signalHandlers, services, userLog); diff --git a/yarn-project/aztec/src/cli/aztec_start_options.ts b/yarn-project/aztec/src/cli/aztec_start_options.ts index 616613c7fb51..863291bf19c1 100644 --- a/yarn-project/aztec/src/cli/aztec_start_options.ts +++ b/yarn-project/aztec/src/cli/aztec_start_options.ts @@ -105,8 +105,7 @@ export const aztecStartOptions: { [key: string]: AztecStartOption[] } = { env: 'NETWORK', }, - configToFlag('--auto-update', sharedNodeConfigMappings.autoUpdate), - configToFlag('--auto-update-url', sharedNodeConfigMappings.autoUpdateUrl), + configToFlag('--enable-version-check', sharedNodeConfigMappings.enableVersionCheck), configToFlag('--sync-mode', sharedNodeConfigMappings.syncMode), configToFlag('--snapshots-urls', sharedNodeConfigMappings.snapshotsUrls), diff --git a/yarn-project/aztec/src/cli/cmds/start_node.ts b/yarn-project/aztec/src/cli/cmds/start_node.ts index a034cf3a6f5a..b550f4ee03e4 100644 --- a/yarn-project/aztec/src/cli/cmds/start_node.ts +++ b/yarn-project/aztec/src/cli/cmds/start_node.ts @@ -5,8 +5,8 @@ import { getSponsoredFPCAddress } from '@aztec/cli/cli-utils'; import { getL1Config } from '@aztec/cli/config'; import { getPublicClient } from '@aztec/ethereum/client'; import { RegistryContract, RollupContract } from 
'@aztec/ethereum/contracts'; -import { SecretValue } from '@aztec/foundation/config'; -import { EthAddress } from '@aztec/foundation/eth-address'; +import { type NetworkNames, SecretValue } from '@aztec/foundation/config'; +import type { EthAddress } from '@aztec/foundation/eth-address'; import type { NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; import { startHttpRpcServer } from '@aztec/foundation/json-rpc/server'; import { Agent, makeUndiciFetch } from '@aztec/foundation/json-rpc/undici'; @@ -32,7 +32,7 @@ import { extractNamespacedOptions, extractRelevantOptions, preloadCrsDataForVerifying, - setupUpdateMonitor, + setupVersionChecker, } from '../util.js'; import { getVersions } from '../versioning.js'; import { startProverBroker } from './start_prover_broker.js'; @@ -109,6 +109,7 @@ export async function startNode( services: NamespacedApiHandlers, adminServices: NamespacedApiHandlers, userLog: LogFn, + networkName: NetworkNames, ): Promise<{ config: AztecNodeConfig }> { // All options set from environment variables const configFromEnvVars = getConfigEnvVars(); @@ -268,16 +269,19 @@ export async function startNode( await addBot(options, signalHandlers, services, wallet, node, telemetry, undefined); } - if (nodeConfig.autoUpdate !== 'disabled' && nodeConfig.autoUpdateUrl) { - await setupUpdateMonitor( - nodeConfig.autoUpdate, - new URL(nodeConfig.autoUpdateUrl), - followsCanonicalRollup, - getPublicClient(nodeConfig!), - nodeConfig.l1Contracts.registryAddress, - signalHandlers, - async config => node.setConfig((await AztecNodeAdminApiSchema.setConfig.parameters().parseAsync([config]))[0]), - ); + if (nodeConfig.enableVersionCheck && networkName !== 'local') { + const cacheDir = process.env.DATA_DIRECTORY ? 
`${process.env.DATA_DIRECTORY}/cache` : undefined; + try { + await setupVersionChecker( + networkName, + followsCanonicalRollup, + getPublicClient(nodeConfig!), + signalHandlers, + cacheDir, + ); + } catch { + /* no-op */ + } } return { config: nodeConfig }; diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts b/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts index ae3d087b02dd..75c320265f5b 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts @@ -1,5 +1,4 @@ import { getL1Config } from '@aztec/cli/config'; -import { getPublicClient } from '@aztec/ethereum/client'; import type { NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; import type { LogFn } from '@aztec/foundation/log'; import { @@ -13,7 +12,7 @@ import { getProverNodeBrokerConfigFromEnv } from '@aztec/prover-node'; import type { ProvingJobBroker } from '@aztec/stdlib/interfaces/server'; import { getConfigEnvVars as getTelemetryClientConfig, initTelemetryClient } from '@aztec/telemetry-client'; -import { extractRelevantOptions, setupUpdateMonitor } from '../util.js'; +import { extractRelevantOptions } from '../util.js'; export async function startProverBroker( options: any, @@ -35,7 +34,6 @@ export async function startProverBroker( throw new Error('L1 registry address is required to start Aztec Node without --deploy-aztec-contracts option'); } - const followsCanonicalRollup = typeof config.rollupVersion !== 'number'; const { addresses, config: rollupConfig } = await getL1Config( config.l1Contracts.registryAddress, config.l1RpcUrls, @@ -49,17 +47,6 @@ export async function startProverBroker( const client = await initTelemetryClient(getTelemetryClientConfig()); const broker = await createAndStartProvingBroker(config, client); - if (options.autoUpdate !== 'disabled' && options.autoUpdateUrl) { - await setupUpdateMonitor( - options.autoUpdate, - new URL(options.autoUpdateUrl), - 
followsCanonicalRollup, - getPublicClient(config), - config.l1Contracts.registryAddress, - signalHandlers, - ); - } - services.proverBroker = [ broker, config.proverBrokerDebugReplayEnabled ? ProvingJobBrokerSchemaWithDebug : ProvingJobBrokerSchema, diff --git a/yarn-project/aztec/src/cli/release_version.ts b/yarn-project/aztec/src/cli/release_version.ts deleted file mode 100644 index fa00edbe31d9..000000000000 --- a/yarn-project/aztec/src/cli/release_version.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { fileURLToPath } from '@aztec/foundation/url'; - -import { readFileSync } from 'fs'; -import { dirname, resolve } from 'path'; - -export const getCliVersion = () => { - const packageJsonPath = resolve(dirname(fileURLToPath(import.meta.url)), '../../package.json'); - const cliVersion: string = JSON.parse(readFileSync(packageJsonPath).toString()).version; - - // If the version is 0.1.0, this is a placeholder version and we are in a docker container; query release please for the latest version - if (cliVersion === '0.1.0') { - const releasePleasePath = resolve( - dirname(fileURLToPath(import.meta.url)), - '../../../../.release-please-manifest.json', - ); - const releaseVersion = JSON.parse(readFileSync(releasePleasePath).toString())['.']; - return releaseVersion; - } - - return cliVersion; -}; diff --git a/yarn-project/aztec/src/cli/util.ts b/yarn-project/aztec/src/cli/util.ts index 078ff0036cc2..1a908ab6c93a 100644 --- a/yarn-project/aztec/src/cli/util.ts +++ b/yarn-project/aztec/src/cli/util.ts @@ -1,17 +1,18 @@ import type { AztecNodeConfig } from '@aztec/aztec-node'; import type { AccountManager } from '@aztec/aztec.js/wallet'; +import { getNetworkConfig } from '@aztec/cli/config'; +import { RegistryContract } from '@aztec/ethereum/contracts'; import type { ViemClient } from '@aztec/ethereum/types'; -import type { ConfigMappingsType } from '@aztec/foundation/config'; -import { EthAddress } from '@aztec/foundation/eth-address'; +import type { ConfigMappingsType, 
NetworkNames } from '@aztec/foundation/config'; import { jsonStringify } from '@aztec/foundation/json-rpc'; import { type LogFn, createLogger } from '@aztec/foundation/log'; -import type { SharedNodeConfig } from '@aztec/node-lib/config'; import type { ProverConfig } from '@aztec/stdlib/interfaces/server'; -import { getTelemetryClient } from '@aztec/telemetry-client/start'; +import { type VersionCheck, getPackageVersion } from '@aztec/stdlib/update-checker'; import type { EmbeddedWallet } from '@aztec/wallets/embedded'; import chalk from 'chalk'; import type { Command } from 'commander'; +import type { Hex } from 'viem'; import { type AztecStartOption, aztecStartOptions } from './aztec_start_options.js'; @@ -290,92 +291,58 @@ export async function preloadCrsDataForServerSideProving( } } -export async function setupUpdateMonitor( - autoUpdateMode: SharedNodeConfig['autoUpdate'], - updatesLocation: URL, +export async function setupVersionChecker( + network: NetworkNames, followsCanonicalRollup: boolean, publicClient: ViemClient, - registryContractAddress: EthAddress, signalHandlers: Array<() => Promise>, - updateNodeConfig?: (config: object) => Promise, -) { - const logger = createLogger('update-check'); - const { UpdateChecker } = await import('@aztec/stdlib/update-checker'); - const checker = await UpdateChecker.new({ - baseURL: updatesLocation, - publicClient, - registryContractAddress, - }); + cacheDir?: string, +): Promise { + const networkConfig = await getNetworkConfig(network, cacheDir); + if (!networkConfig) { + return; + } - // eslint-disable-next-line @typescript-eslint/no-misused-promises - checker.on('newRollupVersion', async ({ latestVersion, currentVersion }) => { - if (isShuttingDown()) { - return; - } + const { VersionChecker } = await import('@aztec/stdlib/update-checker'); - // if node follows canonical rollup then this is equivalent to a config update - if (!followsCanonicalRollup) { - return; - } + const logger = createLogger('version_check'); + 
const registry = new RegistryContract(publicClient, networkConfig.registryAddress as Hex); - if (autoUpdateMode === 'config' || autoUpdateMode === 'config-and-version') { - logger.info(`New rollup version detected. Please restart the node`, { latestVersion, currentVersion }); - await shutdown(logger.info, ExitCode.ROLLUP_UPGRADE, signalHandlers); - } else if (autoUpdateMode === 'notify') { - logger.warn(`New rollup detected. Please restart the node`, { latestVersion, currentVersion }); - } + const checks: Array = []; + checks.push({ + name: 'node', + currentVersion: getPackageVersion() ?? 'unknown', + getLatestVersion: async () => { + const cfg = await getNetworkConfig(network, cacheDir); + return cfg?.nodeVersion; + }, }); - // eslint-disable-next-line @typescript-eslint/no-misused-promises - checker.on('newNodeVersion', async ({ latestVersion, currentVersion }) => { - if (isShuttingDown()) { - return; - } - if (autoUpdateMode === 'config-and-version') { - logger.info(`New node version detected. Please update and restart the node`, { latestVersion, currentVersion }); - await shutdown(logger.info, ExitCode.VERSION_UPGRADE, signalHandlers); - } else if (autoUpdateMode === 'notify') { - logger.info(`New node version detected. Please update and restart the node`, { latestVersion, currentVersion }); + if (followsCanonicalRollup) { + const getLatestVersion = async () => { + const version = (await registry.getRollupVersions()).at(-1); + return version !== undefined ? 
String(version) : undefined; + }; + const currentVersion = await getLatestVersion(); + if (currentVersion !== undefined) { + checks.push({ + name: 'rollup', + currentVersion, + getLatestVersion, + }); } - }); + } - // eslint-disable-next-line @typescript-eslint/no-misused-promises - checker.on('updateNodeConfig', async config => { + const checker = new VersionChecker(checks, 600_000, logger); + checker.on('newVersion', ({ name, latestVersion, currentVersion }) => { if (isShuttingDown()) { return; } - if ((autoUpdateMode === 'config' || autoUpdateMode === 'config-and-version') && updateNodeConfig) { - logger.warn(`Config change detected. Updating node`, config); - try { - await updateNodeConfig(config); - } catch (err) { - logger.warn('Failed to update config', { err }); - } - } - // don't notify on these config changes - }); - - checker.on('updatePublicTelemetryConfig', config => { - if (autoUpdateMode === 'config' || autoUpdateMode === 'config-and-version') { - logger.warn(`Public telemetry config change detected. 
Updating telemetry client`, config); - try { - const publicIncludeMetrics: unknown = (config as any).publicIncludeMetrics; - if (Array.isArray(publicIncludeMetrics) && publicIncludeMetrics.every(m => typeof m === 'string')) { - getTelemetryClient().setExportedPublicTelemetry(publicIncludeMetrics); - } - const publicMetricsCollectFrom: unknown = (config as any).publicMetricsCollectFrom; - if (Array.isArray(publicMetricsCollectFrom) && publicMetricsCollectFrom.every(m => typeof m === 'string')) { - getTelemetryClient().setPublicTelemetryCollectFrom(publicMetricsCollectFrom); - } - } catch (err) { - logger.warn('Failed to update config', { err }); - } - } - // don't notify on these config changes + logger.warn(`New ${name} version available`, { latestVersion, currentVersion }); }); - checker.start(); + signalHandlers.push(() => checker.stop()); } export function stringifyConfig(config: object): string { diff --git a/yarn-project/cli/src/config/cached_fetch.test.ts b/yarn-project/cli/src/config/cached_fetch.test.ts new file mode 100644 index 000000000000..186bdb4ae2bf --- /dev/null +++ b/yarn-project/cli/src/config/cached_fetch.test.ts @@ -0,0 +1,243 @@ +import { jest } from '@jest/globals'; +import { mkdir, readFile, rm, writeFile } from 'fs/promises'; +import { tmpdir } from 'os'; +import { join } from 'path'; + +import { cachedFetch, parseMaxAge } from './cached_fetch.js'; + +describe('cachedFetch', () => { + let tempDir: string; + let cacheFile: string; + let metaFile: string; + let mockFetch: jest.Mock; + const noopLog: any = { trace: () => {}, warn: () => {}, info: () => {} }; + + beforeEach(async () => { + tempDir = join(tmpdir(), `cached-fetch-test-${Date.now()}-${Math.random().toString(36).slice(2)}`); + await mkdir(tempDir, { recursive: true }); + cacheFile = join(tempDir, 'cache.json'); + metaFile = cacheFile + '.meta'; + mockFetch = jest.fn(); + }); + + afterEach(async () => { + await rm(tempDir, { recursive: true, force: true }); + }); + + function 
mockResponse(body: any, init?: { status?: number; headers?: Record }): Response { + const status = init?.status ?? 200; + const headers = new Headers(init?.headers ?? {}); + return { + ok: status >= 200 && status < 300, + status, + statusText: status === 304 ? 'Not Modified' : 'OK', + headers, + json: () => Promise.resolve(body), + } as Response; + } + + async function writeCacheFiles(data: any, opts?: { etag?: string; expiresAt?: number }) { + await writeFile(cacheFile, JSON.stringify(data), 'utf-8'); + await writeFile( + metaFile, + JSON.stringify({ etag: opts?.etag, expiresAt: opts?.expiresAt ?? Date.now() + 60_000 }), + 'utf-8', + ); + } + + it('returns cached data without fetching when cache is fresh', async () => { + const data = { key: 'cached-value' }; + await writeCacheFiles(data, { expiresAt: Date.now() + 60_000 }); + + const result = await cachedFetch('https://example.com/data.json', { cacheFile }, mockFetch, noopLog); + + expect(result).toEqual(data); + expect(mockFetch).not.toHaveBeenCalled(); + }); + + it('sends conditional request with If-None-Match when cache is stale and has ETag', async () => { + const data = { key: 'stale-value' }; + await writeCacheFiles(data, { etag: '"abc123"', expiresAt: Date.now() - 1000 }); + + mockFetch.mockResolvedValue( + mockResponse(null, { + status: 304, + headers: { 'cache-control': 'max-age=300' }, + }), + ); + + const result = await cachedFetch('https://example.com/data.json', { cacheFile }, mockFetch, noopLog); + + expect(result).toEqual(data); + expect(mockFetch).toHaveBeenCalledWith('https://example.com/data.json', { + headers: { 'If-None-Match': '"abc123"' }, + }); + + // Data file should be unchanged + expect(JSON.parse(await readFile(cacheFile, 'utf-8'))).toEqual(data); + // Meta file should have updated expiry + const meta = JSON.parse(await readFile(metaFile, 'utf-8')); + expect(meta.expiresAt).toBeGreaterThan(Date.now()); + }); + + it('returns new data and stores ETag on 200 response', async () => { + 
const staleData = { key: 'old' }; + const freshData = { key: 'new' }; + await writeCacheFiles(staleData, { etag: '"old-etag"', expiresAt: Date.now() - 1000 }); + + mockFetch.mockResolvedValue( + mockResponse(freshData, { + status: 200, + headers: { etag: '"new-etag"', 'cache-control': 'max-age=600' }, + }), + ); + + const result = await cachedFetch('https://example.com/data.json', { cacheFile }, mockFetch, noopLog); + + expect(result).toEqual(freshData); + + // Data file should have new data (raw JSON) + expect(JSON.parse(await readFile(cacheFile, 'utf-8'))).toEqual(freshData); + // Meta file should have new ETag and expiry + const meta = JSON.parse(await readFile(metaFile, 'utf-8')); + expect(meta.etag).toBe('"new-etag"'); + expect(meta.expiresAt).toBeGreaterThan(Date.now()); + }); + + it('fetches normally without caching when no cacheFile is provided', async () => { + const data = { key: 'no-cache' }; + mockFetch.mockResolvedValue(mockResponse(data)); + + const result = await cachedFetch('https://example.com/data.json', {}, mockFetch, noopLog); + + expect(result).toEqual(data); + expect(mockFetch).toHaveBeenCalledWith('https://example.com/data.json'); + }); + + it('falls back to normal fetch when metadata file is missing', async () => { + // Write only data file, no meta file (simulates upgrade from old code) + await writeFile(cacheFile, JSON.stringify({ key: 'old-format' }), 'utf-8'); + + const freshData = { key: 'fresh' }; + mockFetch.mockResolvedValue( + mockResponse(freshData, { + status: 200, + headers: { 'cache-control': 'max-age=300' }, + }), + ); + + const result = await cachedFetch('https://example.com/data.json', { cacheFile }, mockFetch, noopLog); + + expect(result).toEqual(freshData); + // Should have fetched without If-None-Match since no meta + expect(mockFetch).toHaveBeenCalledWith('https://example.com/data.json', { headers: {} }); + }); + + it('falls back to normal fetch when metadata file is corrupt', async () => { + await writeFile(cacheFile, 
JSON.stringify({ key: 'data' }), 'utf-8'); + await writeFile(metaFile, 'not-json!!!', 'utf-8'); + + const freshData = { key: 'fresh' }; + mockFetch.mockResolvedValue( + mockResponse(freshData, { + status: 200, + headers: { 'cache-control': 'max-age=300' }, + }), + ); + + const result = await cachedFetch('https://example.com/data.json', { cacheFile }, mockFetch, noopLog); + + expect(result).toEqual(freshData); + expect(mockFetch).toHaveBeenCalledWith('https://example.com/data.json', { headers: {} }); + }); + + it('falls back to normal fetch when data file is missing but metadata exists', async () => { + await writeFile(metaFile, JSON.stringify({ etag: '"abc"', expiresAt: Date.now() + 60_000 }), 'utf-8'); + + const freshData = { key: 'fresh' }; + mockFetch.mockResolvedValue( + mockResponse(freshData, { + status: 200, + headers: { 'cache-control': 'max-age=300' }, + }), + ); + + const result = await cachedFetch('https://example.com/data.json', { cacheFile }, mockFetch, noopLog); + + expect(result).toEqual(freshData); + // Should not send If-None-Match since data is missing + expect(mockFetch).toHaveBeenCalledWith('https://example.com/data.json', { headers: {} }); + }); + + it('uses defaultMaxAgeMs when server sends no Cache-Control header', async () => { + const data = { key: 'value' }; + mockFetch.mockResolvedValue( + mockResponse(data, { + status: 200, + headers: { etag: '"some-etag"' }, + }), + ); + + const defaultMaxAgeMs = 120_000; // 2 minutes + const before = Date.now(); + await cachedFetch('https://example.com/data.json', { cacheFile, defaultMaxAgeMs }, mockFetch, noopLog); + + const meta = JSON.parse(await readFile(metaFile, 'utf-8')); + expect(meta.expiresAt).toBeGreaterThanOrEqual(before + defaultMaxAgeMs); + expect(meta.expiresAt).toBeLessThanOrEqual(Date.now() + defaultMaxAgeMs); + }); + + it('returns stale cache data when fetch fails', async () => { + const data = { key: 'stale-fallback' }; + await writeCacheFiles(data, { expiresAt: Date.now() - 1000 }); 
+ + mockFetch.mockRejectedValue(new Error('Network error')); + + const result = await cachedFetch('https://example.com/data.json', { cacheFile }, mockFetch, noopLog); + + expect(result).toEqual(data); + }); + + it('returns stale cache data when server returns non-ok status', async () => { + const data = { key: 'stale-server-error' }; + await writeCacheFiles(data, { expiresAt: Date.now() - 1000 }); + + mockFetch.mockResolvedValue(mockResponse(null, { status: 500 })); + + const result = await cachedFetch('https://example.com/data.json', { cacheFile }, mockFetch, noopLog); + + expect(result).toEqual(data); + }); + + it('returns undefined when fetch fails and no cache exists', async () => { + mockFetch.mockRejectedValue(new Error('Network error')); + + const result = await cachedFetch('https://example.com/data.json', { cacheFile }, mockFetch, noopLog); + + expect(result).toBeUndefined(); + }); +}); + +describe('parseMaxAge', () => { + it('extracts max-age from Cache-Control header', () => { + const response = { headers: { get: (name: string) => (name === 'cache-control' ? 'max-age=300' : null) } }; + expect(parseMaxAge(response)).toBe(300_000); + }); + + it('handles max-age with other directives', () => { + const response = { + headers: { get: (name: string) => (name === 'cache-control' ? 'public, max-age=600, must-revalidate' : null) }, + }; + expect(parseMaxAge(response)).toBe(600_000); + }); + + it('returns undefined when no Cache-Control header', () => { + const response = { headers: { get: () => null } }; + expect(parseMaxAge(response)).toBeUndefined(); + }); + + it('returns undefined when no max-age in Cache-Control', () => { + const response = { headers: { get: (name: string) => (name === 'cache-control' ? 
'no-cache' : null) } }; + expect(parseMaxAge(response)).toBeUndefined(); + }); +}); diff --git a/yarn-project/cli/src/config/cached_fetch.ts b/yarn-project/cli/src/config/cached_fetch.ts index 74518805c11f..37c745f94a50 100644 --- a/yarn-project/cli/src/config/cached_fetch.ts +++ b/yarn-project/cli/src/config/cached_fetch.ts @@ -1,24 +1,48 @@ import { createLogger } from '@aztec/aztec.js/log'; -import { mkdir, readFile, stat, writeFile } from 'fs/promises'; +import { mkdir, readFile, writeFile } from 'fs/promises'; import { dirname } from 'path'; export interface CachedFetchOptions { - /** Cache duration in milliseconds */ - cacheDurationMs: number; - /** The cache file */ + /** The cache file path for storing data. If not provided, no caching is performed. */ cacheFile?: string; + /** Fallback max-age in milliseconds when server sends no Cache-Control header. Defaults to 5 minutes. */ + defaultMaxAgeMs?: number; +} + +/** Cache metadata stored in a sidecar .meta file alongside the data file. */ +interface CacheMeta { + etag?: string; + expiresAt: number; +} + +const DEFAULT_MAX_AGE_MS = 5 * 60 * 1000; // 5 minutes + +/** Extracts max-age value in milliseconds from a Response's Cache-Control header. Returns undefined if not present. */ +export function parseMaxAge(response: { headers: { get(name: string): string | null } }): number | undefined { + const cacheControl = response.headers.get('cache-control'); + if (!cacheControl) { + return undefined; + } + const match = cacheControl.match(/max-age=(\d+)/); + if (!match) { + return undefined; + } + return parseInt(match[1], 10) * 1000; } /** - * Fetches data from a URL with file-based caching support. - * This utility can be used by both remote config and bootnodes fetching. + * Fetches data from a URL with file-based HTTP conditional caching. + * + * Data is stored as raw JSON in the cache file (same format as the server returns). + * Caching metadata (ETag, expiry) is stored in a separate sidecar `.meta` file. 
+ * This keeps the data file human-readable and backward-compatible with older code. * * @param url - The URL to fetch from - * @param networkName - Network name for cache directory structure - * @param options - Caching and error handling options - * @param cacheDir - Optional cache directory (defaults to no caching) - * @returns The fetched and parsed JSON data, or undefined if fetch fails and throwOnError is false + * @param options - Caching options + * @param fetch - Fetch implementation (defaults to globalThis.fetch) + * @param log - Logger instance + * @returns The fetched and parsed JSON data, or undefined if fetch fails */ export async function cachedFetch( url: string, @@ -26,42 +50,106 @@ export async function cachedFetch( fetch = globalThis.fetch, log = createLogger('cached_fetch'), ): Promise { - const { cacheDurationMs, cacheFile } = options; + const { cacheFile, defaultMaxAgeMs = DEFAULT_MAX_AGE_MS } = options; + + // If no cacheFile, just fetch normally without caching + if (!cacheFile) { + return fetchAndParse(url, fetch, log); + } + + const metaFile = cacheFile + '.meta'; - // Try to read from cache first + // Try to read metadata + let meta: CacheMeta | undefined; try { - if (cacheFile) { - const info = await stat(cacheFile); - if (info.mtimeMs + cacheDurationMs > Date.now()) { - const cachedData = JSON.parse(await readFile(cacheFile, 'utf-8')); - return cachedData; - } - } + meta = JSON.parse(await readFile(metaFile, 'utf-8')); } catch { - log.trace('Failed to read data from cache'); + log.trace('No usable cache metadata found'); } + // Try to read cached data + let cachedData: T | undefined; try { - const response = await fetch(url); + cachedData = JSON.parse(await readFile(cacheFile, 'utf-8')); + } catch { + log.trace('No usable cached data found'); + } + + // If metadata and data exist and cache is fresh, return directly + if (meta && cachedData !== undefined && meta.expiresAt > Date.now()) { + return cachedData; + } + + // Cache is stale or 
missing — make a (possibly conditional) request + try { + const headers: Record = {}; + if (meta?.etag && cachedData !== undefined) { + headers['If-None-Match'] = meta.etag; + } + + const response = await fetch(url, { headers }); + + if (response.status === 304 && cachedData !== undefined) { + // Not modified — recompute expiry from new response headers and return cached data + const maxAgeMs = parseMaxAge(response) ?? defaultMaxAgeMs; + await writeMetaFile(metaFile, { etag: meta?.etag, expiresAt: Date.now() + maxAgeMs }, log); + return cachedData; + } + if (!response.ok) { log.warn(`Failed to fetch from ${url}: ${response.status} ${response.statusText}`); - return undefined; + return cachedData; } - const data = await response.json(); + // 200 — parse new data and cache it + const data = (await response.json()) as T; + const maxAgeMs = parseMaxAge(response) ?? defaultMaxAgeMs; + const etag = response.headers.get('etag') ?? undefined; - try { - if (cacheFile) { - await mkdir(dirname(cacheFile), { recursive: true }); - await writeFile(cacheFile, JSON.stringify(data), 'utf-8'); - } - } catch (err) { - log.warn('Failed to cache data on disk: ' + cacheFile, { cacheFile, err }); - } + await ensureDir(cacheFile, log); + await Promise.all([ + writeFile(cacheFile, JSON.stringify(data), 'utf-8'), + writeFile(metaFile, JSON.stringify({ etag, expiresAt: Date.now() + maxAgeMs }), 'utf-8'), + ]); return data; + } catch (err) { + log.warn(`Failed to fetch from ${url}`, { err }); + return cachedData; + } +} + +async function fetchAndParse( + url: string, + fetch: typeof globalThis.fetch, + log: ReturnType, +): Promise { + try { + const response = await fetch(url); + if (!response.ok) { + log.warn(`Failed to fetch from ${url}: ${response.status} ${response.statusText}`); + return undefined; + } + return (await response.json()) as T; } catch (err) { log.warn(`Failed to fetch from ${url}`, { err }); return undefined; } } + +async function ensureDir(filePath: string, log: ReturnType) 
{ + try { + await mkdir(dirname(filePath), { recursive: true }); + } catch (err) { + log.warn('Failed to create cache directory for: ' + filePath, { err }); + } +} + +async function writeMetaFile(metaFile: string, meta: CacheMeta, log: ReturnType) { + try { + await mkdir(dirname(metaFile), { recursive: true }); + await writeFile(metaFile, JSON.stringify(meta), 'utf-8'); + } catch (err) { + log.warn('Failed to write cache metadata: ' + metaFile, { err }); + } +} diff --git a/yarn-project/cli/src/config/network_config.ts b/yarn-project/cli/src/config/network_config.ts index a8a9c59b8757..4e835919014a 100644 --- a/yarn-project/cli/src/config/network_config.ts +++ b/yarn-project/cli/src/config/network_config.ts @@ -9,7 +9,6 @@ import { enrichEthAddressVar, enrichVar } from './enrich_env.js'; const DEFAULT_CONFIG_URL = 'https://raw.githubusercontent.com/AztecProtocol/networks/refs/heads/main/network_config.json'; const FALLBACK_CONFIG_URL = 'https://metadata.aztec.network/network_config.json'; -const NETWORK_CONFIG_CACHE_DURATION_MS = 60 * 60 * 1000; // 1 hour /** * Fetches remote network configuration from GitHub with caching support. @@ -87,7 +86,6 @@ async function fetchNetworkConfigFromUrl( if (url.protocol === 'http:' || url.protocol === 'https:') { rawConfig = await cachedFetch(url.href, { - cacheDurationMs: NETWORK_CONFIG_CACHE_DURATION_MS, cacheFile: cacheDir ? 
join(cacheDir, networkName, 'network_config.json') : undefined, }); } else if (url.protocol === 'file:') { diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 666eb044454d..a5fedef5cce2 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -341,9 +341,8 @@ export type EnvVar = | 'K8S_POD_NAME' | 'K8S_POD_UID' | 'K8S_NAMESPACE_NAME' + | 'ENABLE_VERSION_CHECK' | 'VALIDATOR_REEXECUTE_DEADLINE_MS' - | 'AUTO_UPDATE' - | 'AUTO_UPDATE_URL' | 'WEB3_SIGNER_URL' | 'SKIP_ARCHIVER_INITIAL_SYNC' | 'BLOB_ALLOW_EMPTY_SOURCES' diff --git a/yarn-project/foundation/src/config/network_config.ts b/yarn-project/foundation/src/config/network_config.ts index 18cf67a2df7f..43d7e0a2f90c 100644 --- a/yarn-project/foundation/src/config/network_config.ts +++ b/yarn-project/foundation/src/config/network_config.ts @@ -10,6 +10,7 @@ export const NetworkConfigSchema = z l1ChainId: z.number(), blockDurationMs: z.number().positive().optional(), txPublicSetupAllowListExtend: z.string().optional(), + nodeVersion: z.string().optional(), }) .passthrough(); // Allow additional unknown fields to pass through diff --git a/yarn-project/node-lib/src/config/index.ts b/yarn-project/node-lib/src/config/index.ts index c8403d2b09bc..8ae5c66d42f0 100644 --- a/yarn-project/node-lib/src/config/index.ts +++ b/yarn-project/node-lib/src/config/index.ts @@ -9,12 +9,6 @@ export type SharedNodeConfig = { syncMode: 'full' | 'snapshot' | 'force-snapshot'; /** Base URLs for snapshots index. Index file will be searched at `SNAPSHOTS_BASE_URL/aztec-L1_CHAIN_ID-VERSION-ROLLUP_ADDRESS/index.json` */ snapshotsUrls?: string[]; - - /** Auto update mode: disabled - to completely ignore remote signals to update the node. enabled - to respect the signals (potentially shutting this node down). 
log - check for updates but log a warning instead of applying them*/ - autoUpdate?: 'disabled' | 'notify' | 'config' | 'config-and-version'; - /** The base URL against which to check for updates */ - autoUpdateUrl?: string; - /** URL of the Web3Signer instance */ web3SignerUrl?: string; /** Whether to run in fisherman mode */ @@ -22,6 +16,9 @@ export type SharedNodeConfig = { /** Force verification of tx Chonk proofs. Only used for testnet */ debugForceTxProofVerification: boolean; + + /** Check if the node version matches the latest version for the network */ + enableVersionCheck: boolean; }; export const sharedNodeConfigMappings: ConfigMappingsType = { @@ -52,15 +49,6 @@ export const sharedNodeConfigMappings: ConfigMappingsType = { fallback: ['SYNC_SNAPSHOTS_URL'], defaultValue: [], }, - autoUpdate: { - env: 'AUTO_UPDATE', - description: 'The auto update mode for this node', - defaultValue: 'disabled', - }, - autoUpdateUrl: { - env: 'AUTO_UPDATE_URL', - description: 'Base URL to check for updates', - }, web3SignerUrl: { env: 'WEB3_SIGNER_URL', description: 'URL of the Web3Signer instance', @@ -76,4 +64,10 @@ export const sharedNodeConfigMappings: ConfigMappingsType = { description: 'Whether to force tx proof verification. 
Only has an effect if real proving is turned off', ...booleanConfigHelper(false), }, + + enableVersionCheck: { + env: 'ENABLE_VERSION_CHECK', + description: 'Check if the node is running the latest version and is following the latest rollup', + ...booleanConfigHelper(true), + }, }; diff --git a/yarn-project/stdlib/src/update-checker/index.ts b/yarn-project/stdlib/src/update-checker/index.ts index 958afdb51dd2..65d4570f3d1a 100644 --- a/yarn-project/stdlib/src/update-checker/index.ts +++ b/yarn-project/stdlib/src/update-checker/index.ts @@ -1 +1,2 @@ -export { UpdateChecker, getPackageVersion } from './update-checker.js'; +export * from './package_version.js'; +export * from './version_checker.js'; diff --git a/yarn-project/stdlib/src/update-checker/package_version.ts b/yarn-project/stdlib/src/update-checker/package_version.ts new file mode 100644 index 000000000000..c186b4de9bba --- /dev/null +++ b/yarn-project/stdlib/src/update-checker/package_version.ts @@ -0,0 +1,17 @@ +import { fileURLToPath } from '@aztec/foundation/url'; + +import { readFileSync } from 'fs'; +import { dirname, resolve } from 'path'; + +/** Returns the package version from the release-please manifest, or undefined if not found. 
*/ +export function getPackageVersion(): string | undefined { + try { + const releasePleaseManifestPath = resolve( + dirname(fileURLToPath(import.meta.url)), + '../../../../.release-please-manifest.json', + ); + return JSON.parse(readFileSync(releasePleaseManifestPath).toString())['.']; + } catch { + return undefined; + } +} diff --git a/yarn-project/stdlib/src/update-checker/update-checker.test.ts b/yarn-project/stdlib/src/update-checker/update-checker.test.ts deleted file mode 100644 index 890a5080d0d3..000000000000 --- a/yarn-project/stdlib/src/update-checker/update-checker.test.ts +++ /dev/null @@ -1,194 +0,0 @@ -import { randomBigInt } from '@aztec/foundation/crypto/random'; - -import { jest } from '@jest/globals'; - -import { type EventMap, UpdateChecker } from './update-checker.js'; - -describe('UpdateChecker', () => { - let checker: UpdateChecker; - let fetch: jest.Mock; - let getCanonicalRollupVersion: jest.Mock<() => Promise>; - let rollupVersionAtStart: bigint; - let nodeVersionAtStart: string; - let eventHandlers: { - [K in keyof EventMap]: jest.Mock<(...args: EventMap[K]) => void>; - }; - - beforeEach(() => { - nodeVersionAtStart = '0.1.0'; - rollupVersionAtStart = randomBigInt(1000n); - fetch = jest.fn(() => Promise.resolve(new Response(JSON.stringify({ version: nodeVersionAtStart })))); - getCanonicalRollupVersion = jest.fn(() => Promise.resolve(rollupVersionAtStart)); - - checker = new UpdateChecker( - new URL('http://localhost'), - nodeVersionAtStart, - rollupVersionAtStart, - fetch, - getCanonicalRollupVersion, - 100, - ); - - eventHandlers = { - updateNodeConfig: jest.fn(), - newNodeVersion: jest.fn(), - newRollupVersion: jest.fn(), - updatePublicTelemetryConfig: jest.fn(), - }; - - for (const [event, fn] of Object.entries(eventHandlers)) { - checker.on(event as keyof EventMap, fn); - } - }); - - it.each([ - ['it detects no change', () => {}], - [ - 'fetching config fails', - () => { - fetch.mockRejectedValue(new Error('test error')); - }, - ], - 
[ - 'fetching rollup address fails', - () => { - getCanonicalRollupVersion.mockRejectedValue(new Error('test error')); - }, - ], - [ - 'the config does not match the schema', - () => { - fetch.mockResolvedValue( - new Response( - JSON.stringify({ - foo: 'bar', - }), - ), - ); - }, - ], - [ - 'the config does not match the schema', - () => { - fetch.mockResolvedValue( - new Response( - JSON.stringify({ - version: 1, - }), - ), - ); - }, - ], - ])('does not emit an event if %s', async (_, patchFn) => { - patchFn(); - for (let run = 0; run < 5; run++) { - await expect(checker.trigger()).resolves.toBeUndefined(); - for (const fn of Object.values(eventHandlers)) { - expect(fn).not.toHaveBeenCalled(); - } - } - }); - - it.each<[keyof EventMap, () => void]>([ - [ - 'newRollupVersion', - () => { - // ensure the new version is completely different to the previous one - getCanonicalRollupVersion.mockResolvedValueOnce(1000n + randomBigInt(1000n)); - }, - ], - [ - 'newNodeVersion', - () => { - fetch.mockResolvedValueOnce(new Response(JSON.stringify({ version: '0.1.0-foo' }))); - }, - ], - [ - 'updateNodeConfig', - () => { - fetch.mockResolvedValueOnce(new Response(JSON.stringify({ config: { maxTxsPerBlock: 16 } }))); - }, - ], - [ - 'updatePublicTelemetryConfig', - () => { - fetch.mockResolvedValueOnce( - new Response(JSON.stringify({ publicTelemetry: { publicIncludeMetrics: ['aztec'] } })), - ); - }, - ], - ])('emits event: %s', async (event, patchFn) => { - patchFn(); - await expect(checker.trigger()).resolves.toBeUndefined(); - expect(eventHandlers[event]).toHaveBeenCalled(); - }); - - it('calls updateConfig only when config changes', async () => { - fetch.mockResolvedValue( - new Response( - JSON.stringify({ - version: nodeVersionAtStart, - config: { - foo: 'bar', - }, - }), - ), - ); - - await checker.trigger(); - expect(eventHandlers.updateNodeConfig).toHaveBeenCalledTimes(1); - - await checker.trigger(); - expect(eventHandlers.updateNodeConfig).toHaveBeenCalledTimes(1); 
- - fetch.mockResolvedValue( - new Response( - JSON.stringify({ - version: nodeVersionAtStart, - config: { - bar: 'baz', - }, - }), - ), - ); - - await checker.trigger(); - expect(eventHandlers.updateNodeConfig).toHaveBeenCalledTimes(2); - }); - - it('calls updatePublicTelemetryConfig only when config changes', async () => { - fetch.mockResolvedValue( - new Response( - JSON.stringify({ - publicTelemetry: { - publicIncludeMetrics: ['aztec'], - }, - }), - ), - ); - - await checker.trigger(); - expect(eventHandlers.updatePublicTelemetryConfig).toHaveBeenCalledTimes(1); - - await checker.trigger(); - expect(eventHandlers.updatePublicTelemetryConfig).toHaveBeenCalledTimes(1); - - fetch.mockResolvedValue( - new Response( - JSON.stringify({ - publicTelemetry: { - publicIncludeMetrics: ['aztec.validator'], - }, - }), - ), - ); - - await checker.trigger(); - expect(eventHandlers.updatePublicTelemetryConfig).toHaveBeenCalledTimes(2); - }); - - it('reaches out to the expected config URL', async () => { - await checker.trigger(); - expect(fetch).toHaveBeenCalledWith(new URL(`http://localhost`)); - }); -}); diff --git a/yarn-project/stdlib/src/update-checker/update-checker.ts b/yarn-project/stdlib/src/update-checker/update-checker.ts deleted file mode 100644 index 3bf27f948599..000000000000 --- a/yarn-project/stdlib/src/update-checker/update-checker.ts +++ /dev/null @@ -1,166 +0,0 @@ -import { RegistryContract } from '@aztec/ethereum/contracts'; -import type { ViemClient } from '@aztec/ethereum/types'; -import { EthAddress } from '@aztec/foundation/eth-address'; -import { createLogger } from '@aztec/foundation/log'; -import { RunningPromise } from '@aztec/foundation/running-promise'; -import { fileURLToPath } from '@aztec/foundation/url'; - -import { EventEmitter } from 'events'; -import { readFileSync } from 'fs'; -import { dirname, resolve } from 'path'; -import { isDeepStrictEqual } from 'util'; -import { z } from 'zod'; - -const updateConfigSchema = z.object({ - version: 
z.string().optional(), - publicTelemetry: z.any().optional(), - config: z.any().optional(), -}); - -export type EventMap = { - newRollupVersion: [{ currentVersion: bigint; latestVersion: bigint }]; - newNodeVersion: [{ currentVersion: string; latestVersion: string }]; - updateNodeConfig: [object]; - updatePublicTelemetryConfig: [object]; -}; - -type Config = { - baseURL: URL; - nodeVersion?: string; - checkIntervalMs?: number; - registryContractAddress: EthAddress; - publicClient: ViemClient; - fetch?: typeof fetch; -}; - -export class UpdateChecker extends EventEmitter { - private runningPromise: RunningPromise; - private lastPatchedConfig: object = {}; - private lastPatchedPublicTelemetryConfig: object = {}; - - constructor( - private updatesUrl: URL, - private nodeVersion: string | undefined, - private rollupVersion: bigint, - private fetch: typeof globalThis.fetch, - private getLatestRollupVersion: () => Promise, - private checkIntervalMs = 10 * 60_000, // every 10 mins - private log = createLogger('foundation:update-check'), - ) { - super(); - this.runningPromise = new RunningPromise(this.runChecks, this.log, this.checkIntervalMs); - } - - public static async new(config: Config): Promise { - const registryContract = new RegistryContract(config.publicClient, config.registryContractAddress); - const getLatestRollupVersion = () => registryContract.getRollupVersions().then(versions => versions.at(-1)!); - - return new UpdateChecker( - config.baseURL, - config.nodeVersion ?? getPackageVersion(), - await getLatestRollupVersion(), - config.fetch ?? 
fetch, - getLatestRollupVersion, - config.checkIntervalMs, - ); - } - - public start(): void { - if (this.runningPromise.isRunning()) { - this.log.debug(`Can't start update checker again`); - return; - } - - this.log.info('Starting update checker', { - nodeVersion: this.nodeVersion, - rollupVersion: this.rollupVersion, - }); - this.runningPromise.start(); - } - - public stop(): Promise { - if (!this.runningPromise.isRunning()) { - this.log.debug(`Can't stop update checker because it is not running`); - return Promise.resolve(); - } - return this.runningPromise.stop(); - } - - public trigger(): Promise { - return this.runningPromise.trigger(); - } - - private runChecks = async (): Promise => { - await Promise.all([this.checkRollupVersion(), this.checkConfig()]); - }; - - private async checkRollupVersion(): Promise { - try { - const canonicalRollupVersion = await this.getLatestRollupVersion(); - if (canonicalRollupVersion !== this.rollupVersion) { - this.log.debug('New canonical rollup version', { - currentVersion: this.rollupVersion, - latestVersion: canonicalRollupVersion, - }); - this.emit('newRollupVersion', { currentVersion: this.rollupVersion, latestVersion: canonicalRollupVersion }); - } - } catch (err) { - this.log.warn(`Failed to check if there is a new rollup`, err); - } - } - - private async checkConfig(): Promise { - try { - const response = await this.fetch(this.updatesUrl); - const body = await response.json(); - if (!response.ok) { - this.log.warn(`Unexpected HTTP response checking for updates`, { - status: response.status, - body: await response.text(), - url: this.updatesUrl, - }); - } - - const { version, config, publicTelemetry } = updateConfigSchema.parse(body); - - if (this.nodeVersion && version && version !== this.nodeVersion) { - this.log.debug('New node version', { currentVersion: this.nodeVersion, latestVersion: version }); - this.emit('newNodeVersion', { currentVersion: this.nodeVersion, latestVersion: version }); - } - - if (config && 
Object.keys(config).length > 0 && !isDeepStrictEqual(config, this.lastPatchedConfig)) { - this.log.debug('New node config', { config }); - this.lastPatchedConfig = config; - this.emit('updateNodeConfig', config); - } - - if ( - publicTelemetry && - Object.keys(publicTelemetry).length > 0 && - !isDeepStrictEqual(publicTelemetry, this.lastPatchedPublicTelemetryConfig) - ) { - this.log.debug('New metrics config', { config }); - this.lastPatchedPublicTelemetryConfig = publicTelemetry; - this.emit('updatePublicTelemetryConfig', publicTelemetry); - } - } catch (err) { - this.log.warn(`Failed to check if there is an update`, err); - } - } -} - -/** - * Returns package version. - */ -export function getPackageVersion(): string | undefined { - try { - const releasePleaseManifestPath = resolve( - dirname(fileURLToPath(import.meta.url)), - '../../../../.release-please-manifest.json', - ); - const version = JSON.parse(readFileSync(releasePleaseManifestPath).toString())['.']; - return version; - } catch { - return undefined; - } -} diff --git a/yarn-project/stdlib/src/update-checker/version_checker.test.ts b/yarn-project/stdlib/src/update-checker/version_checker.test.ts new file mode 100644 index 000000000000..b5f8b8029b5e --- /dev/null +++ b/yarn-project/stdlib/src/update-checker/version_checker.test.ts @@ -0,0 +1,80 @@ +import { jest } from '@jest/globals'; + +import { type EventMap, type VersionCheck, VersionChecker } from './version_checker.js'; + +describe('VersionChecker', () => { + let checker: VersionChecker; + let getLatestNodeVersion: jest.Mock<() => Promise>; + let getLatestRollupVersion: jest.Mock<() => Promise>; + let eventHandler: jest.Mock<(...args: EventMap['newVersion']) => void>; + + beforeEach(() => { + getLatestNodeVersion = jest.fn(() => Promise.resolve('0.1.0')); + getLatestRollupVersion = jest.fn(() => Promise.resolve('42')); + + const checks: VersionCheck[] = [ + { name: 'node', currentVersion: '0.1.0', getLatestVersion: getLatestNodeVersion }, + { name: 
'rollup', currentVersion: '42', getLatestVersion: getLatestRollupVersion }, + ]; + + checker = new VersionChecker(checks, 100); + + eventHandler = jest.fn(); + checker.on('newVersion', eventHandler); + }); + + it.each([ + ['it detects no change', () => {}], + [ + 'fetching node version fails', + () => { + getLatestNodeVersion.mockRejectedValue(new Error('test error')); + }, + ], + [ + 'fetching rollup version fails', + () => { + getLatestRollupVersion.mockRejectedValue(new Error('test error')); + }, + ], + [ + 'fetching node version returns undefined', + () => { + getLatestNodeVersion.mockResolvedValue(undefined); + }, + ], + ])('does not emit an event if %s', async (_, patchFn) => { + patchFn(); + for (let run = 0; run < 5; run++) { + await expect(checker.trigger()).resolves.toBeUndefined(); + expect(eventHandler).not.toHaveBeenCalled(); + } + }); + + it('emits newVersion when node version changes', async () => { + getLatestNodeVersion.mockResolvedValueOnce('0.2.0'); + await checker.trigger(); + expect(eventHandler).toHaveBeenCalledWith({ + name: 'node', + currentVersion: '0.1.0', + latestVersion: '0.2.0', + }); + }); + + it('emits newVersion when rollup version changes', async () => { + getLatestRollupVersion.mockResolvedValueOnce('999'); + await checker.trigger(); + expect(eventHandler).toHaveBeenCalledWith({ + name: 'rollup', + currentVersion: '42', + latestVersion: '999', + }); + }); + + it('emits for each changed version independently', async () => { + getLatestNodeVersion.mockResolvedValueOnce('0.2.0'); + getLatestRollupVersion.mockResolvedValueOnce('999'); + await checker.trigger(); + expect(eventHandler).toHaveBeenCalledTimes(2); + }); +}); diff --git a/yarn-project/stdlib/src/update-checker/version_checker.ts b/yarn-project/stdlib/src/update-checker/version_checker.ts new file mode 100644 index 000000000000..b239ad9a2ec1 --- /dev/null +++ b/yarn-project/stdlib/src/update-checker/version_checker.ts @@ -0,0 +1,65 @@ +import { createLogger } from 
'@aztec/foundation/log'; +import { RunningPromise } from '@aztec/foundation/promise'; + +import { EventEmitter } from 'node:events'; + +export type EventMap = { + newVersion: [{ name: string; currentVersion: string; latestVersion: string }]; +}; + +export type VersionCheck = { + name: string; + currentVersion: string; + getLatestVersion: () => Promise; +}; + +export class VersionChecker extends EventEmitter { + private runningPromise: RunningPromise; + constructor( + private checks: Array, + intervalCheckMs = 60_000, + private logger = createLogger('version_checker'), + ) { + super(); + this.runningPromise = new RunningPromise(this.run, logger, intervalCheckMs); + } + + public start(): void { + if (this.runningPromise.isRunning()) { + this.logger.warn('VersionChecker is already running'); + return; + } + + this.runningPromise.start(); + this.logger.info('Version check started'); + } + + public trigger(): Promise { + return this.runningPromise.trigger(); + } + + public async stop(): Promise { + if (!this.runningPromise.isRunning()) { + this.logger.warn('VersionChecker is not running'); + return; + } + + await this.runningPromise.stop(); + this.logger.info('Version checker stopped'); + } + + private run = async () => { + await Promise.allSettled(this.checks.map(check => this.checkVersion(check))); + }; + + private async checkVersion({ name, currentVersion, getLatestVersion }: VersionCheck): Promise { + try { + const latestVersion = await getLatestVersion(); + if (latestVersion && latestVersion !== currentVersion) { + this.emit('newVersion', { name, latestVersion, currentVersion }); + } + } catch (err) { + this.logger.warn(`Error checking for new ${name} versions: ${err}`, { err }); + } + } +} From df0235c768164d201e18bbd84efe6c33ac5f4a8c Mon Sep 17 00:00:00 2001 From: danielntmd <162406516+danielntmd@users.noreply.github.com> Date: Tue, 3 Mar 2026 04:56:47 -0500 Subject: [PATCH 06/37] fix: (A-592) p2p client proposal tx collector test (#20998) The aggregator worker 
may wait up to MAX_PEER_WAIT_MS for peer connections before starting collection, so we must give it at least that much additional headroom beyond the collection timeout. Co-authored-by: danielntmd --- yarn-project/p2p/src/testbench/worker_client_manager.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/yarn-project/p2p/src/testbench/worker_client_manager.ts b/yarn-project/p2p/src/testbench/worker_client_manager.ts index d6f3b250fa96..d0e3d3d1334f 100644 --- a/yarn-project/p2p/src/testbench/worker_client_manager.ts +++ b/yarn-project/p2p/src/testbench/worker_client_manager.ts @@ -483,7 +483,8 @@ class WorkerClientManager { }; this.processes[0].send(aggregatorCmd); - const result = await this.waitForBenchResult(0, config.timeoutMs + 30000); + const aggregatorBudgetMs = config.timeoutMs + BENCHMARK_CONSTANTS.MAX_PEER_WAIT_MS + 30000; + const result = await this.waitForBenchResult(0, aggregatorBudgetMs); this.logger.info( `Benchmark complete: fetched=${result.fetchedCount}/${config.txCount}, duration=${result.durationMs.toFixed(0)}ms, success=${result.success}`, From d77a65230cf2ddb8967cb2431afeca2601d1a9a8 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 3 Mar 2026 11:13:38 +0000 Subject: [PATCH 07/37] refactor: use publishers-per-pod in deployments (#21039) This PR changes `VALIDATOR_PUBLISHERS_PER_VALIDATOR_KEY` to `VALIDATOR_PUBLISHERS_PER_REPLICA` to reduce the number of overall publishers used for a test deployment Fix A-609 --- spartan/CLAUDE.md | 19 ++++--- .../aztec-keystore/templates/batchjob.yaml | 34 ++++++------- spartan/aztec-keystore/values.yaml | 2 +- .../scripts/setup-attester-keystore.sh | 50 ++++++++----------- .../templates/env.configmap.yaml | 2 +- spartan/aztec-validator/values.yaml | 2 +- spartan/environments/alpha-net.env | 2 +- spartan/environments/devnet.env | 2 +- spartan/environments/five-tps-long-epoch.env | 2 +- spartan/environments/five-tps-short-epoch.env | 2 +- spartan/environments/kind-minimal.env | 2 +- 
spartan/environments/kind-provers.env | 2 +- spartan/environments/mbps-net.env | 2 +- spartan/environments/next-net.env | 2 +- spartan/environments/next-scenario.env | 2 +- spartan/environments/prove-n-tps-fake.env | 2 +- spartan/environments/prove-n-tps-real.env | 2 +- spartan/environments/staging-ignition.env | 2 +- spartan/environments/staging-public.env | 2 +- spartan/environments/staging.local.env | 2 +- spartan/environments/ten-tps-long-epoch.env | 2 +- spartan/environments/ten-tps-short-epoch.env | 2 +- spartan/environments/testnet.env | 2 +- spartan/environments/tps-scenario.env | 2 +- .../scripts/calculate_publisher_indices.sh | 5 +- spartan/scripts/check_env_vars.sh | 2 +- spartan/scripts/deploy_network.sh | 9 ++-- spartan/terraform/deploy-aztec-infra/main.tf | 8 +-- .../terraform/deploy-aztec-infra/variables.tf | 6 +-- spartan/terraform/modules/web3signer/main.tf | 2 +- .../terraform/modules/web3signer/variables.tf | 6 +-- 31 files changed, 89 insertions(+), 94 deletions(-) diff --git a/spartan/CLAUDE.md b/spartan/CLAUDE.md index ad087602b7d5..460977382e44 100644 --- a/spartan/CLAUDE.md +++ b/spartan/CLAUDE.md @@ -57,7 +57,7 @@ The main entry point is `terraform/deploy-aztec-infra/`: **aztec-validator** (extends aztec-node): - Wrapper chart with `aztec-node` as dependency (aliased as `validator`) - Adds validator-specific ConfigMap (`env.configmap.yaml`) -- Configures mnemonic, validators-per-node, publisher keys +- Configures mnemonic, validators-per-node, publishers-per-replica **aztec-prover-stack**: - Multi-component: prover node, broker, and agent replicas @@ -263,26 +263,31 @@ locals { **Key derivation via Terraform + `setup-attester-keystore.sh`:** -Each release receives a different `PUBLISHER_KEY_INDEX_START` from Terraform: +Publishers are allocated **per replica (pod)**, not per attester key. 
Each release receives a different `PUBLISHER_KEY_INDEX_START` from Terraform: ```hcl # In main.tf custom_settings per release: "validator.node.env.PUBLISHER_KEY_INDEX_START" = var.VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX + - (idx * (var.VALIDATORS_PER_NODE * var.VALIDATOR_PUBLISHERS_PER_VALIDATOR_KEY * var.VALIDATOR_REPLICAS)) + (idx * (var.VALIDATOR_PUBLISHERS_PER_REPLICA * var.VALIDATOR_REPLICAS)) ``` -Example with 4 replicas, 12 validators/node, 2 publishers/key, base index 5000: +Example with 4 replicas, 4 publishers/replica, base index 5000: - Primary (idx=0): `PUBLISHER_KEY_INDEX_START = 5000` -- HA-1 (idx=1): `PUBLISHER_KEY_INDEX_START = 5000 + (1 * 12 * 2 * 4) = 5096` +- HA-1 (idx=1): `PUBLISHER_KEY_INDEX_START = 5000 + (1 * 4 * 4) = 5016` At runtime, `setup-attester-keystore.sh` calculates publisher indices: ```bash # POD_INDEX extracted from pod name (validator-0 → 0, validator-1 → 1, etc.) -PUBLISHER_KEY_INDEX=$((POD_INDEX * VALIDATORS_PER_NODE * PUBLISHERS_PER_VALIDATOR_KEY + PUBLISHER_KEY_INDEX_START)) +PUBLISHER_KEY_INDEX=$((POD_INDEX * VALIDATOR_PUBLISHERS_PER_REPLICA + PUBLISHER_KEY_INDEX_START)) ``` -This ensures each release uses non-overlapping publisher key ranges. +The keystore uses **schema v2** with a top-level `publisher` array shared by all validators on the pod: +```json +{"schemaVersion": 2, "publisher": ["0x1", "0x2", "0x3", "0x4"], "validators": [{"attester": "..."}]} +``` + +This ensures each release uses non-overlapping publisher key ranges while decoupling publisher count from attester count. 
**HA coordination:** - Both releases connect to shared PostgreSQL via `VALIDATOR_HA_DATABASE_URL` diff --git a/spartan/aztec-keystore/templates/batchjob.yaml b/spartan/aztec-keystore/templates/batchjob.yaml index b940f833b03d..e02b62914c49 100644 --- a/spartan/aztec-keystore/templates/batchjob.yaml +++ b/spartan/aztec-keystore/templates/batchjob.yaml @@ -33,7 +33,7 @@ spec: NODES="{{ .Values.attesters.nodeCount }}" HA_COUNT="{{ .Values.attesters.haCount }}" ATTESTERS_PER_NODE="{{ .Values.attesters.attestersPerNode }}" - PUBLISHERS_PER_VALIDATOR_KEY="{{ .Values.publishers.perValidatorKey }}" + VALIDATOR_PUBLISHERS_PER_REPLICA="{{ .Values.publishers.perReplica }}" ATTESTER_KEY_INDEX_START="{{ .Values.attesters.mnemonicStartIndex }}" PUBLISHER_KEY_INDEX_START="{{ .Values.publishers.mnemonicStartIndex }}" PROVER_COUNT=1 @@ -73,29 +73,27 @@ spec: printf '%s' "$addr" >> "$ADDR_FILE" done - # Publishers: start index per node, then pack by validator j and publisher p - pub_base=$((PUBLISHER_KEY_INDEX_START + i * ATTESTERS_PER_NODE * PUBLISHERS_PER_VALIDATOR_KEY)) - for ((j=0;j> "$PUB_KS_FILE" - printf 'type: file-raw\nkeyType: SECP256K1\nprivateKey: %s\n' "$ppk" >> "$PUB_KS_FILE" - - # write addresses CSV per node - if [[ $j -gt 0 || $p -gt 0 ]]; then printf ',' >> "$PUB_ADDR_FILE"; fi - printf '%s' "$paddr" >> "$PUB_ADDR_FILE" - done + # Publishers: flat pool per replica (shared by all attesters on the node) + pub_base=$((PUBLISHER_KEY_INDEX_START + i * VALIDATOR_PUBLISHERS_PER_REPLICA)) + for ((p=0;p> "$PUB_KS_FILE" + printf 'type: file-raw\nkeyType: SECP256K1\nprivateKey: %s\n' "$ppk" >> "$PUB_KS_FILE" + + # write addresses CSV per node + [[ $p -gt 0 ]] && printf ',' >> "$PUB_ADDR_FILE" + printf '%s' "$paddr" >> "$PUB_ADDR_FILE" done echo "Generated config for attesters on node $i" done # Generate HA publisher keys (separate key ranges for each HA release) - PUBLISHERS_PER_STS=$((NODES * ATTESTERS_PER_NODE * PUBLISHERS_PER_VALIDATOR_KEY)) + PUBLISHERS_PER_STS=$((NODES * 
VALIDATOR_PUBLISHERS_PER_REPLICA)) for ((ha=0;ha "$KEY_STORE_DIRECTORY/attesters.json" -echo "Generated configuration for $VALIDATORS_PER_NODE validators with $PUBLISHERS_PER_VALIDATOR_KEY publishers each" +echo "Generated configuration for $VALIDATORS_PER_NODE validators with $VALIDATOR_PUBLISHERS_PER_REPLICA shared publishers per replica" diff --git a/spartan/aztec-validator/templates/env.configmap.yaml b/spartan/aztec-validator/templates/env.configmap.yaml index 0fc49db0c655..5f5377f1f124 100644 --- a/spartan/aztec-validator/templates/env.configmap.yaml +++ b/spartan/aztec-validator/templates/env.configmap.yaml @@ -6,5 +6,5 @@ metadata: {{- include "chart.labels" . | nindent 4 }} data: VALIDATORS_PER_NODE: {{ .Values.validator.validatorsPerNode | quote }} - PUBLISHERS_PER_VALIDATOR_KEY: {{ .Values.validator.publishersPerValidatorKey | default 1 | quote }} + VALIDATOR_PUBLISHERS_PER_REPLICA: {{ .Values.validator.publishersPerReplica | default 4 | quote }} WEB3_SIGNER_URL: {{ .Values.validator.web3signerUrl | default "" | quote }} diff --git a/spartan/aztec-validator/values.yaml b/spartan/aztec-validator/values.yaml index 26f68e65e6df..e47d12d0cb67 100644 --- a/spartan/aztec-validator/values.yaml +++ b/spartan/aztec-validator/values.yaml @@ -14,7 +14,7 @@ validator: mnemonic: "test test test test test test test test test test test junk" mnemonicStartIndex: 2000 validatorsPerNode: 1 - publishersPerValidatorKey: 2 + publishersPerReplica: 4 publisherMnemonicStartIndex: 7000 persistence: diff --git a/spartan/environments/alpha-net.env b/spartan/environments/alpha-net.env index ba4758a6acc7..fad91bbe53f5 100644 --- a/spartan/environments/alpha-net.env +++ b/spartan/environments/alpha-net.env @@ -39,7 +39,7 @@ OTEL_COLLECTOR_ENDPOINT=REPLACE_WITH_GCP_SECRET VALIDATOR_REPLICAS=12 VALIDATORS_PER_NODE=4 -PUBLISHERS_PER_VALIDATOR_KEY=2 +VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 VALIDATOR_RESOURCE_PROFILE="2-core-dedicated" diff --git 
a/spartan/environments/devnet.env b/spartan/environments/devnet.env index d878996cdac1..106979eef6bb 100644 --- a/spartan/environments/devnet.env +++ b/spartan/environments/devnet.env @@ -52,7 +52,7 @@ AZTEC_TARGET_COMMITTEE_SIZE=1 VALIDATOR_MNEMONIC_START_INDEX=$((1 + MNEMONIC_INDEX_OFFSET)) VALIDATOR_INDICES=$(seq -s ',' $VALIDATOR_MNEMONIC_START_INDEX $((VALIDATOR_MNEMONIC_START_INDEX + TOTAL_VALIDATORS - 1))) VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=$((5000 + MNEMONIC_INDEX_OFFSET)) -PUBLISHERS_PER_VALIDATOR_KEY=1 +VALIDATOR_PUBLISHERS_PER_REPLICA=8 SEQ_MIN_TX_PER_BLOCK=0 SEQ_MAX_TX_PER_BLOCK=32 diff --git a/spartan/environments/five-tps-long-epoch.env b/spartan/environments/five-tps-long-epoch.env index bca34b1bfd8f..a5d3ac217a99 100644 --- a/spartan/environments/five-tps-long-epoch.env +++ b/spartan/environments/five-tps-long-epoch.env @@ -25,7 +25,7 @@ OTEL_COLLECTOR_ENDPOINT=REPLACE_WITH_GCP_SECRET VALIDATOR_REPLICAS=12 VALIDATORS_PER_NODE=4 -PUBLISHERS_PER_VALIDATOR_KEY=2 +VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 VALIDATOR_RESOURCE_PROFILE="2-core-dedicated" diff --git a/spartan/environments/five-tps-short-epoch.env b/spartan/environments/five-tps-short-epoch.env index 8ddca2ff57c7..6ac77948ca01 100644 --- a/spartan/environments/five-tps-short-epoch.env +++ b/spartan/environments/five-tps-short-epoch.env @@ -25,7 +25,7 @@ OTEL_COLLECTOR_ENDPOINT=REPLACE_WITH_GCP_SECRET VALIDATOR_REPLICAS=12 VALIDATORS_PER_NODE=4 -PUBLISHERS_PER_VALIDATOR_KEY=2 +VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 VALIDATOR_RESOURCE_PROFILE="2-core-dedicated" diff --git a/spartan/environments/kind-minimal.env b/spartan/environments/kind-minimal.env index bdc7d62f22f4..c70b55aede50 100644 --- a/spartan/environments/kind-minimal.env +++ b/spartan/environments/kind-minimal.env @@ -43,7 +43,7 @@ AZTEC_DOCKER_IMAGE=${AZTEC_DOCKER_IMAGE:-aztecprotocol/aztec:latest} # Validators - minimal setup for upgrade test 
VALIDATOR_REPLICAS=4 VALIDATORS_PER_NODE=12 -PUBLISHERS_PER_VALIDATOR_KEY=1 +VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 # Provers - minimal for faster testing diff --git a/spartan/environments/kind-provers.env b/spartan/environments/kind-provers.env index afc6ce9a97b3..f6e0482d7e1a 100644 --- a/spartan/environments/kind-provers.env +++ b/spartan/environments/kind-provers.env @@ -36,7 +36,7 @@ OTEL_COLLECTOR_ENDPOINT="http://metrics-opentelemetry-collector.metrics:4318" # Validators VALIDATOR_REPLICAS=4 VALIDATORS_PER_NODE=12 # We allocate 0.5 per validator, so 4 * 0.5 * 12 = 24 cores total -PUBLISHERS_PER_VALIDATOR_KEY=1 +VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 # Provers diff --git a/spartan/environments/mbps-net.env b/spartan/environments/mbps-net.env index 0493815e812c..4357bf8fc037 100644 --- a/spartan/environments/mbps-net.env +++ b/spartan/environments/mbps-net.env @@ -38,7 +38,7 @@ AZTEC_LAG_IN_EPOCHS_FOR_RANDAO=2 VALIDATOR_REPLICAS=4 VALIDATORS_PER_NODE=12 -PUBLISHERS_PER_VALIDATOR_KEY=2 +VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 PUBLISHERS_PER_PROVER=2 diff --git a/spartan/environments/next-net.env b/spartan/environments/next-net.env index 1dbe575eae9f..f01c43e4d19a 100644 --- a/spartan/environments/next-net.env +++ b/spartan/environments/next-net.env @@ -41,7 +41,7 @@ AZTEC_LAG_IN_EPOCHS_FOR_RANDAO=2 VALIDATOR_REPLICAS=4 VALIDATORS_PER_NODE=12 -PUBLISHERS_PER_VALIDATOR_KEY=2 +VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 PUBLISHERS_PER_PROVER=2 diff --git a/spartan/environments/next-scenario.env b/spartan/environments/next-scenario.env index 3b11279aed69..b0668e5c960d 100644 --- a/spartan/environments/next-scenario.env +++ b/spartan/environments/next-scenario.env @@ -20,7 +20,7 @@ OTEL_COLLECTOR_ENDPOINT=REPLACE_WITH_GCP_SECRET VALIDATOR_REPLICAS=4 VALIDATORS_PER_NODE=12 -PUBLISHERS_PER_VALIDATOR_KEY=1 
+VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 RPC_REPLICAS=2 diff --git a/spartan/environments/prove-n-tps-fake.env b/spartan/environments/prove-n-tps-fake.env index 1197acde6515..2fe45c46745e 100644 --- a/spartan/environments/prove-n-tps-fake.env +++ b/spartan/environments/prove-n-tps-fake.env @@ -21,7 +21,7 @@ OTEL_COLLECTOR_ENDPOINT=REPLACE_WITH_GCP_SECRET VALIDATOR_REPLICAS=4 VALIDATORS_PER_NODE=12 -PUBLISHERS_PER_VALIDATOR_KEY=1 +VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 REAL_VERIFIER=false diff --git a/spartan/environments/prove-n-tps-real.env b/spartan/environments/prove-n-tps-real.env index 4be76065b0ca..cc6442210832 100644 --- a/spartan/environments/prove-n-tps-real.env +++ b/spartan/environments/prove-n-tps-real.env @@ -22,7 +22,7 @@ OTEL_COLLECTOR_ENDPOINT=REPLACE_WITH_GCP_SECRET VALIDATOR_REPLICAS=4 VALIDATORS_PER_NODE=12 -PUBLISHERS_PER_VALIDATOR_KEY=1 +VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 REAL_VERIFIER=true diff --git a/spartan/environments/staging-ignition.env b/spartan/environments/staging-ignition.env index b490b6248c4a..5ae4843aa4ec 100644 --- a/spartan/environments/staging-ignition.env +++ b/spartan/environments/staging-ignition.env @@ -33,7 +33,7 @@ CREATE_ROLLUP_CONTRACTS=${CREATE_ROLLUP_CONTRACTS:-false} VALIDATOR_REPLICAS=4 VALIDATORS_PER_NODE=12 -PUBLISHERS_PER_VALIDATOR_KEY=2 +VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 PUBLISHERS_PER_PROVER=2 diff --git a/spartan/environments/staging-public.env b/spartan/environments/staging-public.env index 100900a664b0..40fbc48e4f3b 100644 --- a/spartan/environments/staging-public.env +++ b/spartan/environments/staging-public.env @@ -38,7 +38,7 @@ P2P_TX_POOL_DELETE_TXS_AFTER_REORG=true VALIDATOR_REPLICAS=5 VALIDATORS_PER_NODE=16 -PUBLISHERS_PER_VALIDATOR_KEY=2 +VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 
VALIDATOR_HA_REPLICAS=1 VALIDATOR_RESOURCE_PROFILE="prod-spot" diff --git a/spartan/environments/staging.local.env b/spartan/environments/staging.local.env index b0f06c35b38e..99054b7b87b1 100644 --- a/spartan/environments/staging.local.env +++ b/spartan/environments/staging.local.env @@ -14,7 +14,7 @@ FUNDING_PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf VALIDATOR_REPLICAS=4 VALIDATORS_PER_NODE=12 -PUBLISHERS_PER_VALIDATOR_KEY=2 +VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 PUBLISHERS_PER_PROVER=2 diff --git a/spartan/environments/ten-tps-long-epoch.env b/spartan/environments/ten-tps-long-epoch.env index 51b6d8463123..a91cb0995253 100644 --- a/spartan/environments/ten-tps-long-epoch.env +++ b/spartan/environments/ten-tps-long-epoch.env @@ -25,7 +25,7 @@ OTEL_COLLECTOR_ENDPOINT=REPLACE_WITH_GCP_SECRET VALIDATOR_REPLICAS=12 VALIDATORS_PER_NODE=4 -PUBLISHERS_PER_VALIDATOR_KEY=2 +VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 VALIDATOR_RESOURCE_PROFILE="2-core-dedicated" diff --git a/spartan/environments/ten-tps-short-epoch.env b/spartan/environments/ten-tps-short-epoch.env index 313baed45607..4114d3b0a928 100644 --- a/spartan/environments/ten-tps-short-epoch.env +++ b/spartan/environments/ten-tps-short-epoch.env @@ -25,7 +25,7 @@ OTEL_COLLECTOR_ENDPOINT=REPLACE_WITH_GCP_SECRET VALIDATOR_REPLICAS=12 VALIDATORS_PER_NODE=4 -PUBLISHERS_PER_VALIDATOR_KEY=2 +VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 VALIDATOR_RESOURCE_PROFILE="2-core-dedicated" diff --git a/spartan/environments/testnet.env b/spartan/environments/testnet.env index 0785c9ffa79e..f72af734d6a2 100644 --- a/spartan/environments/testnet.env +++ b/spartan/environments/testnet.env @@ -75,7 +75,7 @@ RPC_INGRESS_SSL_CERT_NAMES='["testnet-rpc-cert"]' VALIDATOR_REPLICAS=4 VALIDATORS_PER_NODE=64 -PUBLISHERS_PER_VALIDATOR_KEY=1 +VALIDATOR_PUBLISHERS_PER_REPLICA=8 
VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 VALIDATOR_HA_REPLICAS=1 VALIDATOR_RESOURCE_PROFILE="prod-spot" diff --git a/spartan/environments/tps-scenario.env b/spartan/environments/tps-scenario.env index 5572b3f77db9..9ebddc63fb33 100644 --- a/spartan/environments/tps-scenario.env +++ b/spartan/environments/tps-scenario.env @@ -34,7 +34,7 @@ AZTEC_MANA_TARGET=2147483647 VALIDATOR_REPLICAS=12 VALIDATORS_PER_NODE=4 -PUBLISHERS_PER_VALIDATOR_KEY=2 +VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 VALIDATOR_RESOURCE_PROFILE="2-core-dedicated" diff --git a/spartan/scripts/calculate_publisher_indices.sh b/spartan/scripts/calculate_publisher_indices.sh index 709cfd52e8cd..1b8ec890ab8b 100755 --- a/spartan/scripts/calculate_publisher_indices.sh +++ b/spartan/scripts/calculate_publisher_indices.sh @@ -28,7 +28,7 @@ source "$ENVIRONMENT_FILE" # Set defaults (same as deploy_network.sh) VALIDATOR_REPLICAS=${VALIDATOR_REPLICAS:-4} VALIDATORS_PER_NODE=${VALIDATORS_PER_NODE:-12} -PUBLISHERS_PER_VALIDATOR_KEY=${PUBLISHERS_PER_VALIDATOR_KEY:-2} +VALIDATOR_PUBLISHERS_PER_REPLICA=${VALIDATOR_PUBLISHERS_PER_REPLICA:-4} VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=${VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX:-5000} VALIDATOR_HA_REPLICAS=${VALIDATOR_HA_REPLICAS:-0} @@ -36,8 +36,7 @@ PUBLISHERS_PER_PROVER=${PUBLISHERS_PER_PROVER:-2} PROVER_PUBLISHER_MNEMONIC_START_INDEX=${PROVER_PUBLISHER_MNEMONIC_START_INDEX:-8000} # Calculate validator publisher indices -TOTAL_VALIDATOR_KEYS=$((VALIDATOR_REPLICAS * VALIDATORS_PER_NODE)) -TOTAL_VALIDATOR_PUBLISHERS=$((TOTAL_VALIDATOR_KEYS * PUBLISHERS_PER_VALIDATOR_KEY * (1 + VALIDATOR_HA_REPLICAS))) +TOTAL_VALIDATOR_PUBLISHERS=$((VALIDATOR_REPLICAS * VALIDATOR_PUBLISHERS_PER_REPLICA * (1 + VALIDATOR_HA_REPLICAS))) VALIDATOR_PUBLISHER_INDICES="" if (( TOTAL_VALIDATOR_PUBLISHERS > 0 )); then diff --git a/spartan/scripts/check_env_vars.sh b/spartan/scripts/check_env_vars.sh index f4f92e9b6f10..2f2cb337800e 100755 --- 
a/spartan/scripts/check_env_vars.sh +++ b/spartan/scripts/check_env_vars.sh @@ -176,7 +176,7 @@ EXCLUDED_VARS_ARRAY=( "VALIDATOR_VALUES" "VALUES_FILE" "PUBLISHER_KEY_INDEX_START" - "PUBLISHERS_PER_VALIDATOR_KEY" + "VALIDATOR_PUBLISHERS_PER_REPLICA" "PUBLISHERS_PER_PROVER" ) diff --git a/spartan/scripts/deploy_network.sh b/spartan/scripts/deploy_network.sh index dc40910b6de2..4984fbaa2daa 100755 --- a/spartan/scripts/deploy_network.sh +++ b/spartan/scripts/deploy_network.sh @@ -88,7 +88,7 @@ VALIDATOR_MNEMONIC_START_INDEX=${VALIDATOR_MNEMONIC_START_INDEX:-1} VALIDATORS_PER_NODE=${VALIDATORS_PER_NODE:-12} VALIDATOR_REPLICAS=${VALIDATOR_REPLICAS:-4} VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=${VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX:-5000} -PUBLISHERS_PER_VALIDATOR_KEY=${PUBLISHERS_PER_VALIDATOR_KEY:-2} +VALIDATOR_PUBLISHERS_PER_REPLICA=${VALIDATOR_PUBLISHERS_PER_REPLICA:-4} VALIDATOR_HA_REPLICAS=${VALIDATOR_HA_REPLICAS:-0} PROVER_PUBLISHER_MNEMONIC_START_INDEX=${PROVER_PUBLISHER_MNEMONIC_START_INDEX:-8000} PUBLISHERS_PER_PROVER=${PUBLISHERS_PER_PROVER:-1} @@ -214,9 +214,8 @@ fi # Compute and include publisher indices in prefunding list # Uses env overrides when provided, otherwise falls back to values.yaml defaults -TOTAL_VALIDATOR_KEYS=$((VALIDATOR_REPLICAS * VALIDATORS_PER_NODE)) -# Total publishers = keys * publishers_per_key * (1 + HA_REPLICAS) -TOTAL_VALIDATOR_PUBLISHERS=$((TOTAL_VALIDATOR_KEYS * PUBLISHERS_PER_VALIDATOR_KEY * (1 + VALIDATOR_HA_REPLICAS))) +# Total publishers = replicas * publishers_per_replica * (1 + HA_REPLICAS) +TOTAL_VALIDATOR_PUBLISHERS=$((VALIDATOR_REPLICAS * VALIDATOR_PUBLISHERS_PER_REPLICA * (1 + VALIDATOR_HA_REPLICAS))) if (( TOTAL_VALIDATOR_PUBLISHERS > 0 )); then VALIDATOR_PUBLISHER_RANGE=$(seq "$VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX" $((VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX + TOTAL_VALIDATOR_PUBLISHERS - 1)) | tr '\n' ',' | sed 's/,$//') @@ -527,7 +526,7 @@ VALIDATOR_MNEMONIC_START_INDEX = ${VALIDATOR_MNEMONIC_START_INDEX} 
VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX = ${VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX} VALIDATORS_PER_NODE = ${VALIDATORS_PER_NODE} VALIDATOR_REPLICAS = ${VALIDATOR_REPLICAS} -VALIDATOR_PUBLISHERS_PER_VALIDATOR_KEY = ${PUBLISHERS_PER_VALIDATOR_KEY} +VALIDATOR_PUBLISHERS_PER_REPLICA = ${VALIDATOR_PUBLISHERS_PER_REPLICA} VALIDATOR_HA_REPLICAS = ${VALIDATOR_HA_REPLICAS} SEQ_MIN_TX_PER_BLOCK = ${SEQ_MIN_TX_PER_BLOCK} SEQ_MAX_TX_PER_BLOCK = ${SEQ_MAX_TX_PER_BLOCK} diff --git a/spartan/terraform/deploy-aztec-infra/main.tf b/spartan/terraform/deploy-aztec-infra/main.tf index 74021428bac8..fc46ee5e3b94 100644 --- a/spartan/terraform/deploy-aztec-infra/main.tf +++ b/spartan/terraform/deploy-aztec-infra/main.tf @@ -51,7 +51,7 @@ module "web3signer" { VALIDATOR_HA_REPLICAS = tonumber(var.VALIDATOR_HA_REPLICAS) VALIDATOR_MNEMONIC_START_INDEX = tonumber(var.VALIDATOR_MNEMONIC_START_INDEX) VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX = tonumber(var.VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX) - VALIDATOR_PUBLISHERS_PER_VALIDATOR_KEY = var.VALIDATOR_PUBLISHERS_PER_VALIDATOR_KEY + VALIDATOR_PUBLISHERS_PER_REPLICA = var.VALIDATOR_PUBLISHERS_PER_REPLICA PROVER_COUNT = tonumber(var.PROVER_REPLICAS) PUBLISHERS_PER_PROVER = tonumber(var.PROVER_PUBLISHERS_PER_PROVER) PROVER_PUBLISHER_MNEMONIC_START_INDEX = tonumber(var.PROVER_PUBLISHER_MNEMONIC_START_INDEX) @@ -185,7 +185,7 @@ locals { "validator.mnemonic" = var.VALIDATOR_MNEMONIC "validator.mnemonicStartIndex" = var.VALIDATOR_MNEMONIC_START_INDEX "validator.validatorsPerNode" = var.VALIDATORS_PER_NODE - "validator.publishersPerValidatorKey" = var.VALIDATOR_PUBLISHERS_PER_VALIDATOR_KEY + "validator.publishersPerReplica" = var.VALIDATOR_PUBLISHERS_PER_REPLICA "validator.publisherMnemonicStartIndex" = var.VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX "validator.replicaCount" = var.VALIDATOR_REPLICAS "validator.sentinel.enabled" = var.SENTINEL_ENABLED @@ -208,7 +208,7 @@ locals { "validator.node.env.KEY_INDEX_START" = 
var.VALIDATOR_MNEMONIC_START_INDEX "validator.node.env.PUBLISHER_KEY_INDEX_START" = var.VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX "validator.node.env.VALIDATORS_PER_NODE" = var.VALIDATORS_PER_NODE - "validator.node.env.PUBLISHERS_PER_VALIDATOR_KEY" = var.VALIDATOR_PUBLISHERS_PER_VALIDATOR_KEY + "validator.node.env.VALIDATOR_PUBLISHERS_PER_REPLICA" = var.VALIDATOR_PUBLISHERS_PER_REPLICA "validator.node.proverRealProofs" = var.PROVER_REAL_PROOFS "validator.node.env.SEQ_MIN_TX_PER_BLOCK" = var.SEQ_MIN_TX_PER_BLOCK "validator.node.env.SEQ_MAX_TX_PER_BLOCK" = var.SEQ_MAX_TX_PER_BLOCK @@ -257,7 +257,7 @@ locals { local.validator_ha_settings, { "validator.node.env.VALIDATOR_HA_REPLICA_INDEX" = tostring(idx) - "validator.node.env.PUBLISHER_KEY_INDEX_START" = var.VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX + (idx * (var.VALIDATORS_PER_NODE * var.VALIDATOR_PUBLISHERS_PER_VALIDATOR_KEY * var.VALIDATOR_REPLICAS)) + "validator.node.env.PUBLISHER_KEY_INDEX_START" = var.VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX + (idx * (var.VALIDATOR_PUBLISHERS_PER_REPLICA * var.VALIDATOR_REPLICAS)) "validator.service.p2p.announcePort" = local.p2p_port_validators[idx] "validator.service.p2p.port" = local.p2p_port_validators[idx] } diff --git a/spartan/terraform/deploy-aztec-infra/variables.tf b/spartan/terraform/deploy-aztec-infra/variables.tf index 2a0c9a8398b9..28caede2a671 100644 --- a/spartan/terraform/deploy-aztec-infra/variables.tf +++ b/spartan/terraform/deploy-aztec-infra/variables.tf @@ -191,10 +191,10 @@ variable "VALIDATORS_PER_NODE" { default = 12 } -variable "VALIDATOR_PUBLISHERS_PER_VALIDATOR_KEY" { - description = "Number of publisher EOAs per validator key" +variable "VALIDATOR_PUBLISHERS_PER_REPLICA" { + description = "Number of publisher EOAs per validator replica (pod)" type = number - default = 1 + default = 4 } variable "VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX" { diff --git a/spartan/terraform/modules/web3signer/main.tf b/spartan/terraform/modules/web3signer/main.tf index 
b51b0f9f773a..af5ce6ff56f4 100644 --- a/spartan/terraform/modules/web3signer/main.tf +++ b/spartan/terraform/modules/web3signer/main.tf @@ -44,7 +44,7 @@ resource "helm_release" "keystore_setup" { } } publishers = { - perValidatorKey = var.VALIDATOR_PUBLISHERS_PER_VALIDATOR_KEY + perReplica = var.VALIDATOR_PUBLISHERS_PER_REPLICA mnemonicStartIndex = var.VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX } provers = { diff --git a/spartan/terraform/modules/web3signer/variables.tf b/spartan/terraform/modules/web3signer/variables.tf index 20b009cb6704..936e75a201f3 100644 --- a/spartan/terraform/modules/web3signer/variables.tf +++ b/spartan/terraform/modules/web3signer/variables.tf @@ -53,10 +53,10 @@ variable "VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX" { type = number } -variable "VALIDATOR_PUBLISHERS_PER_VALIDATOR_KEY" { - description = "Number of publishers for each attester key" +variable "VALIDATOR_PUBLISHERS_PER_REPLICA" { + description = "Number of publisher EOAs per validator replica (pod)" type = number - default = 0 + default = 4 } variable "AZTEC_DOCKER_IMAGE" { From 3058541d2a94119133382957be067650d36a5f7c Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 3 Mar 2026 12:12:21 +0000 Subject: [PATCH 08/37] chore: web3signer refreshes keystore (#21045) When redeploying testnet, validators refused to come up because the attester count was changed and the new keys weren't loaded into web3signer's state even though they keystores were updated. 
--- .../modules/web3signer/values/web3signer.yaml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/spartan/terraform/modules/web3signer/values/web3signer.yaml b/spartan/terraform/modules/web3signer/values/web3signer.yaml index cf4679bd1583..f72a2cbef2bb 100644 --- a/spartan/terraform/modules/web3signer/values/web3signer.yaml +++ b/spartan/terraform/modules/web3signer/values/web3signer.yaml @@ -10,7 +10,15 @@ customCommand: - /bin/bash - -c - | - /opt/web3signer/bin/web3signer --config-file /data/config.yaml eth1 + trap 'kill $(jobs -p) 2>/dev/null' EXIT + ( + sleep 30 # initial delay to let web3signer start + while true; do + curl -s -X POST http://localhost:9000/reload > /dev/null 2>&1 || true + sleep 60 + done + ) & + /opt/web3signer/bin/web3signer --config-file /data/config.yaml eth1 extraEnv: - name: K8S_NAMESPACE_NAME From bfa7fa8a3640eab33f5d3924cda38d7fd0eaa5a0 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Tue, 3 Mar 2026 10:49:12 -0300 Subject: [PATCH 09/37] feat(sequencer): set block building limits from checkpoint limits (#20974) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The checkpoint builder now tracks remaining L2 gas, DA gas, and blob fields in a checkpoint while building each block, and forwards them to the public processor. This means that a proposer will not propose blocks that overall exceed checkpoint limits, and validators will properly reject them. In addition, the proposer defaults the L2 and DA gas limits per block to the checkpoint limits divided by expected number of blocks, times two. This value is still capped by the remaining gas in the checkpoint builder, but means that a proposer will not waste the entire checkpoint gas allocation on the first block. 
Fixes A-528 ## Changes _As described by Claude_ - Derives per-block L2 and DA gas budgets from L1 checkpoint limits (`rollupManaLimit`, `MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT`) using the timetable's `maxNumberOfBlocks` and a configurable multiplier (default: 2) for creating proposals only (**not** for validation) - Moves blob field and gas tracking from the checkpoint proposal job loop into `CheckpointBuilder.capLimitsByCheckpointBudgets()`, which caps per-block limits by remaining checkpoint budgets for both proposer and validator paths - Plumbs `maxTxsPerBlock`, `maxL2BlockGas`, `maxDABlockGas`, and `rollupManaLimit` through to validator re-execution so limits are enforced during block validation - Replaces byte-based `maxBlockSizeInBytes` with field-based blob limits and a pre-processing blob field estimation (`getPrivateTxEffectsSizeInFields`) - Adds `gasPerBlockAllocationMultiplier` config (`SEQ_GAS_PER_BLOCK_ALLOCATION_MULTIPLIER`, default: 2) - Makes `maxL2BlockGas` and `maxDABlockGas` optional (auto-computed from checkpoint limits when not set) 🤖 Generated with [Claude Code](https://claude.com/claude-code) --------- Co-authored-by: Claude Opus 4.6 Co-authored-by: Aztec Bot <49558828+AztecBot@users.noreply.github.com> --- .../vm2/testing/avm_inputs.testdata.bin | Bin 2084088 -> 2084088 bytes .../vm2/testing/minimal_tx.testdata.bin | Bin 189007 -> 189007 bytes .../operators/reference/changelog/v4.md | 28 +++ .../crates/types/src/constants.nr | 2 +- .../aztec-node/src/aztec-node/server.ts | 9 +- .../avm_check_circuit3.test.ts | 7 + .../avm_proving_tests/avm_proving_tester.ts | 8 + yarn-project/constants/src/constants.ts | 20 +- .../e2e_l1_publisher/e2e_l1_publisher.test.ts | 2 +- yarn-project/foundation/src/config/env_var.ts | 2 +- .../light/lightweight_checkpoint_builder.ts | 4 + .../src/job/epoch-proving-job.test.ts | 6 +- .../src/client/sequencer-client.ts | 98 +++++++- yarn-project/sequencer-client/src/config.ts | 24 +- 
.../sequencer/checkpoint_proposal_job.test.ts | 61 +---- .../checkpoint_proposal_job.timing.test.ts | 3 +- .../src/sequencer/checkpoint_proposal_job.ts | 63 +++-- .../checkpoint_voter.ha.integration.test.ts | 1 + .../src/sequencer/sequencer.test.ts | 6 +- .../src/sequencer/timetable.ts | 14 +- .../sequencer-client/src/sequencer/types.ts | 5 +- .../src/test/mock_checkpoint_builder.ts | 4 +- .../fixtures/public_tx_simulation_tester.ts | 19 +- .../simulator/src/public/fixtures/utils.ts | 3 +- .../public_processor/public_processor.test.ts | 21 +- .../public_processor/public_processor.ts | 64 ++--- yarn-project/stdlib/src/block/l2_block.ts | 12 + yarn-project/stdlib/src/checkpoint/index.ts | 1 + .../stdlib/src/checkpoint/validate.ts | 114 +++++++++ .../stdlib/src/interfaces/block-builder.ts | 25 +- yarn-project/stdlib/src/interfaces/configs.ts | 11 +- ...ivate_kernel_tail_circuit_public_inputs.ts | 9 + yarn-project/stdlib/src/tests/mocks.ts | 4 +- yarn-project/stdlib/src/tx/tx.test.ts | 108 +++++++++ yarn-project/stdlib/src/tx/tx.ts | 31 ++- yarn-project/validator-client/README.md | 36 +++ .../src/checkpoint_builder.test.ts | 229 ++++++++++++++++-- .../src/checkpoint_builder.ts | 63 ++++- .../src/validator.ha.integration.test.ts | 1 + .../src/validator.integration.test.ts | 34 ++- .../validator-client/src/validator.test.ts | 4 +- .../validator-client/src/validator.ts | 13 + .../server_world_state_synchronizer.ts | 8 +- 43 files changed, 945 insertions(+), 232 deletions(-) create mode 100644 yarn-project/stdlib/src/checkpoint/validate.ts diff --git a/barretenberg/cpp/src/barretenberg/vm2/testing/avm_inputs.testdata.bin b/barretenberg/cpp/src/barretenberg/vm2/testing/avm_inputs.testdata.bin index 13cf29d653c4605453ac2ec6aad2470c813eead6..515bb19cc8da277e82a21918549876e8753e9e1a 100644 GIT binary patch delta 191 zcmZwBD-wcG7(ikEiwY_N;`@O%hatKNi%X!Gugzq_83Rd+!J$hd*w}^ZA$shFZ%#dR zp4NH#zKNKRyTqE`7#zAZf{j~v9-_}~_~zBu 
zmRVb7@0*nN<1V+3Aw+fA9naT~a1Z-uokmrzRiTx{uN`#JgMmIwSQuak8zVSyF~$T_ f%-~@T9}6rIAjAsmMytq7{w@_PVbj!Ue0qHW@sKtU diff --git a/barretenberg/cpp/src/barretenberg/vm2/testing/minimal_tx.testdata.bin b/barretenberg/cpp/src/barretenberg/vm2/testing/minimal_tx.testdata.bin index 10e8459aa2572d62545c75002be4111371cb597e..4695356718067a7051b82964591f1c81abc644cf 100644 GIT binary patch delta 103 zcmV-t0GR*J#0$^F3$R}SKQuKrGGjM5W;r=zIW%N7V>2}|W@9-vFk(3{GBRQ^G&yB9 zH#uWtIWssmGB#s3WMnouWo2S9V>B^0W??Zilb!)20RxlzJ?yiu0!ljp1B3cKhx$DM JxB5K-RiuiJAzX=E=$C#)+wEMwY3`MkXf7re;QF$%&Rm78c2g=9Y;@ zNvUZjmX?WW28o8rrpbm$Kv^Rr!$kAR^B7eac_#m|eY1Hzlb1CkPxCL^_FuM)+ke?I Hg)RXA*sCJg diff --git a/docs/docs-operate/operators/reference/changelog/v4.md b/docs/docs-operate/operators/reference/changelog/v4.md index 8bd0522245d0..774d43713996 100644 --- a/docs/docs-operate/operators/reference/changelog/v4.md +++ b/docs/docs-operate/operators/reference/changelog/v4.md @@ -70,6 +70,31 @@ The `getL2Tips()` RPC endpoint now returns a restructured response with addition - Replace `tips.latest` with `tips.proposed` - For `checkpointed`, `proven`, and `finalized` tips, access block info via `.block` (e.g., `tips.proven.block.number`) +### Block gas limits reworked + +The byte-based block size limit has been removed and replaced with field-based blob limits and automatic gas budget computation from L1 rollup limits. + +**Removed:** + +```bash +--maxBlockSizeInBytes ($SEQ_MAX_BLOCK_SIZE_IN_BYTES) +``` + +**Changed to optional (now auto-computed from L1 if not set):** + +```bash +--maxL2BlockGas ($SEQ_MAX_L2_BLOCK_GAS) +--maxDABlockGas ($SEQ_MAX_DA_BLOCK_GAS) +``` + +**New:** + +```bash +--gasPerBlockAllocationMultiplier ($SEQ_GAS_PER_BLOCK_ALLOCATION_MULTIPLIER) +``` + +**Migration**: Remove `SEQ_MAX_BLOCK_SIZE_IN_BYTES` from your configuration. Per-block L2 and DA gas budgets are now derived automatically as `(checkpointLimit / maxBlocks) * multiplier`, where the multiplier defaults to 2. 
You can still override `SEQ_MAX_L2_BLOCK_GAS` and `SEQ_MAX_DA_BLOCK_GAS` explicitly, but they will be capped at the checkpoint-level limits. + ### Setup phase allow list requires function selectors The transaction setup phase allow list now enforces function selectors, restricting which specific functions can run during setup on whitelisted contracts. Previously, any public function on a whitelisted contract or class was permitted. @@ -117,11 +142,13 @@ This replaces the previous hardcoded default and allows network operators to set Node operators can now update validator attester keys, coinbase, and fee recipient without restarting the node by calling the new `reloadKeystore` admin RPC endpoint. What is updated on reload: + - Validator attester keys (add, remove, or replace) - Coinbase and fee recipient per validator - Publisher-to-validator mapping What is NOT updated (requires restart): + - L1 publisher signers - Prover keys - HA signer connections @@ -133,6 +160,7 @@ New validators must use a publisher key already initialized at startup. Reload i The admin JSON-RPC endpoint now supports auto-generated API key authentication. **Behavior:** + - A cryptographically secure API key is auto-generated at first startup and displayed once via stdout - Only the SHA-256 hash is persisted to `/admin/api_key_hash` - The key is reused across restarts when `--data-directory` is set diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index 98dbda352970..483824ce5ccf 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -1064,7 +1064,7 @@ pub global GAS_ESTIMATION_DA_GAS_LIMIT: u32 = GAS_ESTIMATION_TEARDOWN_DA_GAS_LIMIT + MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; // Default gas limits. Users should use gas estimation, or they will overpay gas fees. 
-// TODO: consider moving to typescript +// TODO: These are overridden in typescript-land. Remove them from here. pub global DEFAULT_TEARDOWN_L2_GAS_LIMIT: u32 = 1_000_000; // Arbitrary default number. pub global DEFAULT_L2_GAS_LIMIT: u32 = MAX_PROCESSABLE_L2_GAS; // Arbitrary default number. pub global DEFAULT_TEARDOWN_DA_GAS_LIMIT: u32 = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT / 2; // Arbitrary default number. diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 5d8bc7bd69d8..0fe264bf11e6 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -271,10 +271,11 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { config.l1Contracts = { ...config.l1Contracts, ...l1ContractsAddresses }; const rollupContract = new RollupContract(publicClient, config.l1Contracts.rollupAddress.toString()); - const [l1GenesisTime, slotDuration, rollupVersionFromRollup] = await Promise.all([ + const [l1GenesisTime, slotDuration, rollupVersionFromRollup, rollupManaLimit] = await Promise.all([ rollupContract.getL1GenesisTime(), rollupContract.getSlotDuration(), rollupContract.getVersion(), + rollupContract.getManaLimit().then(Number), ] as const); config.rollupVersion ??= Number(rollupVersionFromRollup); @@ -347,7 +348,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { // Create FullNodeCheckpointsBuilder for block proposal handling and tx validation const validatorCheckpointsBuilder = new FullNodeCheckpointsBuilder( - { ...config, l1GenesisTime, slotDuration: Number(slotDuration) }, + { ...config, l1GenesisTime, slotDuration: Number(slotDuration), rollupManaLimit }, worldStateSynchronizer, archiver, dateProvider, @@ -484,7 +485,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { // Create and start the sequencer client const checkpointsBuilder = new CheckpointsBuilder( 
- { ...config, l1GenesisTime, slotDuration: Number(slotDuration) }, + { ...config, l1GenesisTime, slotDuration: Number(slotDuration), rollupManaLimit }, worldStateSynchronizer, archiver, dateProvider, @@ -1275,7 +1276,7 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { const processor = publicProcessorFactory.create(merkleTreeFork, newGlobalVariables, config); // REFACTOR: Consider merging ProcessReturnValues into ProcessedTx - const [processedTxs, failedTxs, _usedTxs, returns, _blobFields, debugLogs] = await processor.process([tx]); + const [processedTxs, failedTxs, _usedTxs, returns, debugLogs] = await processor.process([tx]); // REFACTOR: Consider returning the error rather than throwing if (failedTxs.length) { this.log.warn(`Simulated tx ${txHash} fails: ${failedTxs[0].error}`, { txHash }); diff --git a/yarn-project/bb-prover/src/avm_proving_tests/avm_check_circuit3.test.ts b/yarn-project/bb-prover/src/avm_proving_tests/avm_check_circuit3.test.ts index 79f9b28cdfb5..c1e8e4d8d686 100644 --- a/yarn-project/bb-prover/src/avm_proving_tests/avm_check_circuit3.test.ts +++ b/yarn-project/bb-prover/src/avm_proving_tests/avm_check_circuit3.test.ts @@ -1,8 +1,10 @@ +import { DEFAULT_L2_GAS_LIMIT, MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT } from '@aztec/constants'; import { Fr } from '@aztec/foundation/curves/bn254'; import { EthAddress } from '@aztec/foundation/eth-address'; import { AvmTestContractArtifact } from '@aztec/noir-test-contracts.js/AvmTest'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { ContractInstanceWithAddress } from '@aztec/stdlib/contract'; +import { Gas } from '@aztec/stdlib/gas'; import { L2ToL1Message, ScopedL2ToL1Message } from '@aztec/stdlib/messaging'; import { NativeWorldStateService } from '@aztec/world-state'; @@ -187,9 +189,14 @@ describe('AVM check-circuit – unhappy paths 3', () => { it( 'a nested exceptional halt is recovered from in caller', async () => { + // The contract requires 
>200k DA gas (it allocates da_gas_left - 200_000 to the nested call). + // Use a higher DA gas limit than the default since DEFAULT_DA_GAS_LIMIT is ~196k. + const gasLimits = new Gas(MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT, DEFAULT_L2_GAS_LIMIT); await tester.simProveVerifyAppLogic( { address: avmTestContractInstance.address, fnName: 'external_call_to_divide_by_zero_recovers', args: [] }, /*expectRevert=*/ false, + /*txLabel=*/ 'unlabeledTx', + gasLimits, ); }, TIMEOUT, diff --git a/yarn-project/bb-prover/src/avm_proving_tests/avm_proving_tester.ts b/yarn-project/bb-prover/src/avm_proving_tests/avm_proving_tester.ts index 54b9292be26e..2fc15a2599a9 100644 --- a/yarn-project/bb-prover/src/avm_proving_tests/avm_proving_tester.ts +++ b/yarn-project/bb-prover/src/avm_proving_tests/avm_proving_tester.ts @@ -10,6 +10,7 @@ import { import type { PublicTxResult } from '@aztec/simulator/server'; import { AvmCircuitInputs, AvmCircuitPublicInputs, PublicSimulatorConfig } from '@aztec/stdlib/avm'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; +import type { Gas } from '@aztec/stdlib/gas'; import type { MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server'; import type { GlobalVariables } from '@aztec/stdlib/tx'; import { NativeWorldStateService } from '@aztec/world-state'; @@ -211,6 +212,7 @@ export class AvmProvingTester extends PublicTxSimulationTester { privateInsertions?: TestPrivateInsertions, txLabel: string = 'unlabeledTx', disableRevertCheck: boolean = false, + gasLimits?: Gas, ): Promise { const simTimer = new Timer(); const simRes = await this.simulateTx( @@ -221,6 +223,7 @@ export class AvmProvingTester extends PublicTxSimulationTester { feePayer, privateInsertions, txLabel, + gasLimits, ); const simDuration = simTimer.ms(); this.logger.info(`Simulation took ${simDuration} ms for tx ${txLabel}`); @@ -247,6 +250,7 @@ export class AvmProvingTester extends PublicTxSimulationTester { teardownCall?: TestEnqueuedCall, feePayer?: AztecAddress, 
privateInsertions?: TestPrivateInsertions, + gasLimits?: Gas, ) { return await this.simProveVerify( sender, @@ -258,6 +262,7 @@ export class AvmProvingTester extends PublicTxSimulationTester { privateInsertions, txLabel, true, + gasLimits, ); } @@ -265,6 +270,7 @@ export class AvmProvingTester extends PublicTxSimulationTester { appCall: TestEnqueuedCall, expectRevert?: boolean, txLabel: string = 'unlabeledTx', + gasLimits?: Gas, ) { await this.simProveVerify( /*sender=*/ AztecAddress.fromNumber(42), @@ -275,6 +281,8 @@ export class AvmProvingTester extends PublicTxSimulationTester { /*feePayer=*/ undefined, /*privateInsertions=*/ undefined, txLabel, + /*disableRevertCheck=*/ false, + gasLimits, ); } } diff --git a/yarn-project/constants/src/constants.ts b/yarn-project/constants/src/constants.ts index f27eb9dcaf3a..6956d82f9f7c 100644 --- a/yarn-project/constants/src/constants.ts +++ b/yarn-project/constants/src/constants.ts @@ -7,6 +7,8 @@ import { GENESIS_BLOCK_HEADER_HASH as GENESIS_BLOCK_HEADER_HASH_BIGINT, INITIAL_CHECKPOINT_NUMBER as INITIAL_CHECKPOINT_NUM_RAW, INITIAL_L2_BLOCK_NUM as INITIAL_L2_BLOCK_NUM_RAW, + MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT, + MAX_PROCESSABLE_L2_GAS, } from './constants.gen.js'; // Typescript-land-only constants @@ -17,16 +19,24 @@ export const SPONSORED_FPC_SALT = BigInt(0); export * from './constants.gen.js'; /** The initial L2 block number (typed as BlockNumber). This is the first block number in the Aztec L2 chain. */ -// Shadow the export from constants.gen above // eslint-disable-next-line import-x/export export const INITIAL_L2_BLOCK_NUM: BlockNumber = BlockNumber(INITIAL_L2_BLOCK_NUM_RAW); /** The initial L2 checkpoint number (typed as CheckpointNumber). This is the first checkpoint number in the Aztec L2 chain. 
*/ -// Shadow the export from constants.gen above - -export const INITIAL_L2_CHECKPOINT_NUM: CheckpointNumber = CheckpointNumber(INITIAL_CHECKPOINT_NUM_RAW); +// eslint-disable-next-line import-x/export +export const INITIAL_CHECKPOINT_NUMBER: CheckpointNumber = CheckpointNumber(INITIAL_CHECKPOINT_NUM_RAW); /** The block header hash for the genesis block 0. */ -// Shadow the export from constants.gen above // eslint-disable-next-line import-x/export export const GENESIS_BLOCK_HEADER_HASH = new Fr(GENESIS_BLOCK_HEADER_HASH_BIGINT); + +// Override the default gas limits set in noir-protocol-circuit constants with saner ones +// Note that these values are not used in noir-land and are only for use in TypeScript code, so we can set them to whatever we want. +// eslint-disable-next-line import-x/export +export const DEFAULT_L2_GAS_LIMIT = MAX_PROCESSABLE_L2_GAS; +// eslint-disable-next-line import-x/export +export const DEFAULT_TEARDOWN_L2_GAS_LIMIT = DEFAULT_L2_GAS_LIMIT / 8; +// eslint-disable-next-line import-x/export +export const DEFAULT_DA_GAS_LIMIT = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT / 4; +// eslint-disable-next-line import-x/export +export const DEFAULT_TEARDOWN_DA_GAS_LIMIT = DEFAULT_DA_GAS_LIMIT / 2; diff --git a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts index 26e874e2629f..59a7dd5c6178 100644 --- a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts @@ -450,7 +450,7 @@ describe('L1Publisher integration', () => { const checkpoint = await buildCheckpoint(globalVariables, txs, currentL1ToL2Messages); const block = checkpoint.blocks[0]; - const totalManaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.totalGas.l2Gas)), Fr.ZERO); + const totalManaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO); 
expect(totalManaUsed.toBigInt()).toEqual(block.header.totalManaUsed.toBigInt()); prevHeader = block.header; diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index a5fedef5cce2..b1a3c3734b57 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -201,12 +201,12 @@ export type EnvVar = | 'SENTINEL_ENABLED' | 'SENTINEL_HISTORY_LENGTH_IN_EPOCHS' | 'SENTINEL_HISTORIC_PROVEN_PERFORMANCE_LENGTH_IN_EPOCHS' - | 'SEQ_MAX_BLOCK_SIZE_IN_BYTES' | 'SEQ_MAX_TX_PER_BLOCK' | 'SEQ_MIN_TX_PER_BLOCK' | 'SEQ_PUBLISH_TXS_WITH_PROPOSALS' | 'SEQ_MAX_DA_BLOCK_GAS' | 'SEQ_MAX_L2_BLOCK_GAS' + | 'SEQ_GAS_PER_BLOCK_ALLOCATION_MULTIPLIER' | 'SEQ_PUBLISHER_PRIVATE_KEY' | 'SEQ_PUBLISHER_PRIVATE_KEYS' | 'SEQ_PUBLISHER_ADDRESSES' diff --git a/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.ts b/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.ts index ab32be72936d..d8784c80cd39 100644 --- a/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.ts +++ b/yarn-project/prover-client/src/light/lightweight_checkpoint_builder.ts @@ -154,6 +154,10 @@ export class LightweightCheckpointBuilder { return this.blocks.length; } + public getBlocks() { + return this.blocks; + } + /** * Adds a new block to the checkpoint. The tx effects must have already been inserted into the db if * this is called after tx processing, if that's not the case, then set `insertTxsEffects` to true. 
diff --git a/yarn-project/prover-node/src/job/epoch-proving-job.test.ts b/yarn-project/prover-node/src/job/epoch-proving-job.test.ts index c94818623302..2ff1d48c0f21 100644 --- a/yarn-project/prover-node/src/job/epoch-proving-job.test.ts +++ b/yarn-project/prover-node/src/job/epoch-proving-job.test.ts @@ -134,7 +134,7 @@ describe('epoch-proving-job', () => { publicProcessor.process.mockImplementation(async txs => { const txsArray = await toArray(txs); const processedTxs = await Promise.all(txsArray.map(tx => mock({ hash: tx.getTxHash() }))); - return [processedTxs, [], txsArray, [], 0, []]; + return [processedTxs, [], txsArray, [], []]; }); }); @@ -179,7 +179,7 @@ describe('epoch-proving-job', () => { publicProcessor.process.mockImplementation(async txs => { const txsArray = await toArray(txs); const errors = txsArray.map(tx => ({ error: new Error('Failed to process tx'), tx })); - return [[], errors, [], [], 0, []]; + return [[], errors, [], [], []]; }); const job = createJob(); @@ -190,7 +190,7 @@ describe('epoch-proving-job', () => { }); it('fails if does not process all txs for a block', async () => { - publicProcessor.process.mockImplementation(_txs => Promise.resolve([[], [], [], [], 0, []])); + publicProcessor.process.mockImplementation(_txs => Promise.resolve([[], [], [], [], []])); const job = createJob(); await job.run(); diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index c55521d7b233..015401c0377c 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -1,4 +1,5 @@ import type { BlobClientInterface } from '@aztec/blob-client/client'; +import { MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT } from '@aztec/constants'; import { EpochCache } from '@aztec/epoch-cache'; import { isAnvilTestChain } from '@aztec/ethereum/chain'; import { getPublicClient } from '@aztec/ethereum/client'; @@ 
-18,10 +19,15 @@ import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; import { L1Metrics, type TelemetryClient } from '@aztec/telemetry-client'; import { FullNodeCheckpointsBuilder, NodeKeystoreAdapter, type ValidatorClient } from '@aztec/validator-client'; -import { type SequencerClientConfig, getPublisherConfigFromSequencerConfig } from '../config.js'; +import { + DefaultSequencerConfig, + type SequencerClientConfig, + getPublisherConfigFromSequencerConfig, +} from '../config.js'; import { GlobalVariableBuilder } from '../global_variable_builder/index.js'; import { SequencerPublisherFactory } from '../publisher/sequencer-publisher-factory.js'; import { Sequencer, type SequencerConfig } from '../sequencer/index.js'; +import { SequencerTimetable } from '../sequencer/timetable.js'; /** * Encapsulates the full sequencer and publisher. @@ -137,17 +143,14 @@ export class SequencerClient { }); const ethereumSlotDuration = config.ethereumSlotDuration; - const l1Constants = { l1GenesisTime, slotDuration: Number(slotDuration), ethereumSlotDuration }; - const globalsBuilder = new GlobalVariableBuilder({ ...config, ...l1Constants, rollupVersion }); - - let sequencerManaLimit = config.maxL2BlockGas ?? rollupManaLimit; - if (sequencerManaLimit > rollupManaLimit) { - log.warn( - `Provided maxL2BlockGas ${sequencerManaLimit} is greater than the max allowed by L1. Setting limit to ${rollupManaLimit}.`, - ); - sequencerManaLimit = rollupManaLimit; - } + const globalsBuilder = new GlobalVariableBuilder({ + ...config, + l1GenesisTime, + slotDuration: Number(slotDuration), + ethereumSlotDuration, + rollupVersion, + }); // When running in anvil, assume we can post a tx up until one second before the end of an L1 slot. // Otherwise, we need the full L1 slot duration for publishing to ensure inclusion. @@ -157,6 +160,12 @@ export class SequencerClient { const l1PublishingTimeBasedOnChain = isAnvilTestChain(config.l1ChainId) ? 
1 : ethereumSlotDuration; const l1PublishingTime = config.l1PublishingTime ?? l1PublishingTimeBasedOnChain; + // Combine user-defined block-level limits with checkpoint-level limits (from L1/constants/config) + // to derive the final per-block gas budgets fed into the sequencer. + const { maxL2BlockGas, maxDABlockGas } = this.computeBlockGasLimits(config, rollupManaLimit, l1PublishingTime, log); + + const l1Constants = { l1GenesisTime, slotDuration: Number(slotDuration), ethereumSlotDuration, rollupManaLimit }; + const sequencer = new Sequencer( publisherFactory, validatorClient, @@ -171,7 +180,7 @@ export class SequencerClient { deps.dateProvider, epochCache, rollupContract, - { ...config, l1PublishingTime, maxL2BlockGas: sequencerManaLimit }, + { ...config, l1PublishingTime, maxL2BlockGas, maxDABlockGas }, telemetryClient, log, ); @@ -233,4 +242,69 @@ export class SequencerClient { get maxL2BlockGas(): number | undefined { return this.sequencer.maxL2BlockGas; } + + /** + * Computes per-block L2 and DA gas budgets based on the L1 rollup limits and the timetable. + * If the user explicitly set a limit, it is capped at the corresponding checkpoint limit. + * Otherwise, derives it as (checkpointLimit / maxBlocks) * multiplier, capped at the checkpoint limit. + */ + private static computeBlockGasLimits( + config: SequencerClientConfig, + rollupManaLimit: number, + l1PublishingTime: number, + log: ReturnType, + ): { maxL2BlockGas: number; maxDABlockGas: number } { + const maxNumberOfBlocks = new SequencerTimetable({ + ethereumSlotDuration: config.ethereumSlotDuration, + aztecSlotDuration: config.aztecSlotDuration, + l1PublishingTime, + p2pPropagationTime: config.attestationPropagationTime, + blockDurationMs: config.blockDurationMs, + enforce: config.enforceTimeTable ?? DefaultSequencerConfig.enforceTimeTable, + }).maxNumberOfBlocks; + + const multiplier = config.gasPerBlockAllocationMultiplier ?? 
DefaultSequencerConfig.gasPerBlockAllocationMultiplier; + + // Compute maxL2BlockGas + let maxL2BlockGas: number; + if (config.maxL2BlockGas !== undefined) { + if (config.maxL2BlockGas > rollupManaLimit) { + log.warn( + `Provided MAX_L2_BLOCK_GAS ${config.maxL2BlockGas} exceeds L1 rollup mana limit ${rollupManaLimit} (capping)`, + ); + maxL2BlockGas = rollupManaLimit; + } else { + maxL2BlockGas = config.maxL2BlockGas; + } + } else { + maxL2BlockGas = Math.min(rollupManaLimit, Math.ceil((rollupManaLimit / maxNumberOfBlocks) * multiplier)); + } + + // Compute maxDABlockGas + const daCheckpointLimit = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; + let maxDABlockGas: number; + if (config.maxDABlockGas !== undefined) { + if (config.maxDABlockGas > daCheckpointLimit) { + log.warn( + `Provided MAX_DA_BLOCK_GAS ${config.maxDABlockGas} exceeds DA checkpoint limit ${daCheckpointLimit} (capping)`, + ); + maxDABlockGas = daCheckpointLimit; + } else { + maxDABlockGas = config.maxDABlockGas; + } + } else { + maxDABlockGas = Math.min(daCheckpointLimit, Math.ceil((daCheckpointLimit / maxNumberOfBlocks) * multiplier)); + } + + log.info(`Computed block gas limits L2=${maxL2BlockGas} DA=${maxDABlockGas}`, { + maxL2BlockGas, + maxDABlockGas, + rollupManaLimit, + daCheckpointLimit, + maxNumberOfBlocks, + multiplier, + }); + + return { maxL2BlockGas, maxDABlockGas }; + } } diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index 64a7d321a2a8..8268d92245a3 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -36,15 +36,13 @@ export type { SequencerConfig }; * Default values for SequencerConfig. * Centralized location for all sequencer configuration defaults. 
*/ -export const DefaultSequencerConfig: ResolvedSequencerConfig = { +export const DefaultSequencerConfig = { sequencerPollingIntervalMS: 500, maxTxsPerBlock: DEFAULT_MAX_TXS_PER_BLOCK, minTxsPerBlock: 1, buildCheckpointIfEmpty: false, publishTxsWithProposals: false, - maxL2BlockGas: 10e9, - maxDABlockGas: 10e9, - maxBlockSizeInBytes: 1024 * 1024, + gasPerBlockAllocationMultiplier: 2, enforceTimeTable: true, attestationPropagationTime: DEFAULT_P2P_PROPAGATION_TIME, secondsBeforeInvalidatingBlockAsCommitteeMember: 144, // 12 L1 blocks @@ -59,7 +57,7 @@ export const DefaultSequencerConfig: ResolvedSequencerConfig = { shuffleAttestationOrdering: false, skipPushProposedBlocksToArchiver: false, skipPublishingCheckpointsPercent: 0, -}; +} satisfies ResolvedSequencerConfig; /** * Configuration settings for the SequencerClient. @@ -97,12 +95,19 @@ export const sequencerConfigMappings: ConfigMappingsType = { maxL2BlockGas: { env: 'SEQ_MAX_L2_BLOCK_GAS', description: 'The maximum L2 block gas.', - ...numberConfigHelper(DefaultSequencerConfig.maxL2BlockGas), + parseEnv: (val: string) => (val ? parseInt(val, 10) : undefined), }, maxDABlockGas: { env: 'SEQ_MAX_DA_BLOCK_GAS', description: 'The maximum DA block gas.', - ...numberConfigHelper(DefaultSequencerConfig.maxDABlockGas), + parseEnv: (val: string) => (val ? parseInt(val, 10) : undefined), + }, + gasPerBlockAllocationMultiplier: { + env: 'SEQ_GAS_PER_BLOCK_ALLOCATION_MULTIPLIER', + description: + 'Per-block gas budget multiplier for both L2 and DA gas. Budget per block is (checkpointLimit / maxBlocks) * multiplier.' 
+ + ' Values greater than one allow early blocks to use more than their even share, relying on checkpoint-level capping for later blocks.', + ...numberConfigHelper(DefaultSequencerConfig.gasPerBlockAllocationMultiplier), }, coinbase: { env: 'COINBASE', @@ -122,11 +127,6 @@ export const sequencerConfigMappings: ConfigMappingsType = { env: 'ACVM_BINARY_PATH', description: 'The path to the ACVM binary', }, - maxBlockSizeInBytes: { - env: 'SEQ_MAX_BLOCK_SIZE_IN_BYTES', - description: 'Max block size', - ...numberConfigHelper(DefaultSequencerConfig.maxBlockSizeInBytes), - }, enforceTimeTable: { env: 'SEQ_ENFORCE_TIME_TABLE', description: 'Whether to enforce the time table when building blocks', diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts index 4361634eb771..f1a702e992f2 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts @@ -1,9 +1,3 @@ -import { - NUM_BLOCK_END_BLOB_FIELDS, - NUM_CHECKPOINT_END_MARKER_FIELDS, - NUM_FIRST_BLOCK_END_BLOB_FIELDS, -} from '@aztec/blob-lib/encoding'; -import { BLOBS_PER_CHECKPOINT, FIELDS_PER_BLOB } from '@aztec/constants'; import type { EpochCache } from '@aztec/epoch-cache'; import { BlockNumber, @@ -84,7 +78,7 @@ describe('CheckpointProposalJob', () => { let job: TestCheckpointProposalJob; let timetable: SequencerTimetable; - let l1Constants: L1RollupConstants; + let l1Constants: L1RollupConstants & { rollupManaLimit: number }; let config: ResolvedSequencerConfig; let lastBlockNumber: BlockNumber; @@ -147,6 +141,7 @@ describe('CheckpointProposalJob', () => { epochDuration: 16, proofSubmissionEpochs: 4, targetCommitteeSize: 48, + rollupManaLimit: Infinity, }; dateProvider = new TestDateProvider(); @@ -768,53 +763,6 @@ describe('CheckpointProposalJob', () => { // waitUntilTimeInSlot should NOT be 
called since the only block is the last block expect(waitSpy).not.toHaveBeenCalled(); }); - - it('tracks remaining blob field capacity across multiple blocks', async () => { - jest - .spyOn(job.getTimetable(), 'canStartNextBlock') - .mockReturnValueOnce({ canStart: true, deadline: 10, isLastBlock: false }) - .mockReturnValueOnce({ canStart: true, deadline: 18, isLastBlock: true }) - .mockReturnValue({ canStart: false, deadline: undefined, isLastBlock: false }); - - const txs = await Promise.all([makeTx(1, chainId), makeTx(2, chainId), makeTx(3, chainId)]); - - p2p.getPendingTxCount.mockResolvedValue(10); - p2p.iterateEligiblePendingTxs.mockImplementation(() => mockTxIterator(Promise.resolve(txs))); - - // Create 2 blocks - block 1 has 2 txs, block 2 has 1 tx - const block1 = await makeBlock(txs.slice(0, 2), globalVariables); - const globalVariables2 = new GlobalVariables( - chainId, - version, - BlockNumber(newBlockNumber + 1), - SlotNumber(newSlotNumber), - 0n, - coinbase, - feeRecipient, - gasFees, - ); - const block2 = await makeBlock([txs[2]], globalVariables2); - - checkpointBuilder.seedBlocks([block1, block2], [txs.slice(0, 2), [txs[2]]]); - validatorClient.collectAttestations.mockResolvedValue(getAttestations(block2)); - - await job.execute(); - - // Verify blob field limits were correctly calculated - expect(checkpointBuilder.buildBlockCalls).toHaveLength(2); - - const initialCapacity = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB - NUM_CHECKPOINT_END_MARKER_FIELDS; - - // Block 1 (first in checkpoint): gets initial capacity - first block overhead (7) - const block1MaxBlobFields = initialCapacity - NUM_FIRST_BLOCK_END_BLOB_FIELDS; - expect(checkpointBuilder.buildBlockCalls[0].opts.maxBlobFields).toBe(block1MaxBlobFields); - - // Block 2: gets remaining capacity - subsequent block overhead (6) - const block1BlobFieldsUsed = block1.body.txEffects.reduce((sum, tx) => sum + tx.getNumBlobFields(), 0); - const remainingAfterBlock1 = block1MaxBlobFields - 
block1BlobFieldsUsed; - const block2MaxBlobFields = remainingAfterBlock1 - NUM_BLOCK_END_BLOB_FIELDS; - expect(checkpointBuilder.buildBlockCalls[1].opts.maxBlobFields).toBe(block2MaxBlobFields); - }); }); describe('build single block', () => { @@ -833,7 +781,6 @@ describe('CheckpointProposalJob', () => { indexWithinCheckpoint: IndexWithinCheckpoint(1), buildDeadline: undefined, blockTimestamp: 0n, - remainingBlobFields: 1, txHashesAlreadyIncluded: new Set(), }); @@ -855,7 +802,6 @@ describe('CheckpointProposalJob', () => { indexWithinCheckpoint: IndexWithinCheckpoint(1), buildDeadline: undefined, blockTimestamp: 0n, - remainingBlobFields: 1, txHashesAlreadyIncluded: new Set(), }); @@ -1116,9 +1062,8 @@ class TestCheckpointProposalJob extends CheckpointProposalJob { indexWithinCheckpoint: IndexWithinCheckpoint; buildDeadline: Date | undefined; txHashesAlreadyIncluded: Set; - remainingBlobFields: number; }, - ): Promise<{ block: L2Block; usedTxs: Tx[]; remainingBlobFields: number } | { error: Error } | undefined> { + ): Promise<{ block: L2Block; usedTxs: Tx[] } | { error: Error } | undefined> { return super.buildSingleBlock(checkpointBuilder, opts); } } diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts index ad88b7d040c1..1b7cabc8fe9e 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts @@ -208,7 +208,7 @@ describe('CheckpointProposalJob Timing Tests', () => { let slasherClient: MockProxy; let metrics: MockProxy; - let l1Constants: L1RollupConstants; + let l1Constants: L1RollupConstants & { rollupManaLimit: number }; let config: ResolvedSequencerConfig; // Test state @@ -330,6 +330,7 @@ describe('CheckpointProposalJob Timing Tests', () => { epochDuration: 16, proofSubmissionEpochs: 4, targetCommitteeSize: 48, 
+ rollupManaLimit: Infinity, }; // Initialize test state diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts index 466d2f259f17..d461b68c30b4 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts @@ -1,5 +1,3 @@ -import { NUM_CHECKPOINT_END_MARKER_FIELDS, getNumBlockEndBlobFields } from '@aztec/blob-lib/encoding'; -import { BLOBS_PER_CHECKPOINT, FIELDS_PER_BLOB } from '@aztec/constants'; import type { EpochCache } from '@aztec/epoch-cache'; import { BlockNumber, @@ -32,7 +30,7 @@ import { type L2BlockSource, MaliciousCommitteeAttestationsAndSigners, } from '@aztec/stdlib/block'; -import type { Checkpoint } from '@aztec/stdlib/checkpoint'; +import { type Checkpoint, validateCheckpoint } from '@aztec/stdlib/checkpoint'; import { getSlotStartBuildTimestamp } from '@aztec/stdlib/epoch-helpers'; import { Gas } from '@aztec/stdlib/gas'; import { @@ -267,6 +265,20 @@ export class CheckpointProposalJob implements Traceable { this.setStateFn(SequencerState.ASSEMBLING_CHECKPOINT, this.slot); const checkpoint = await checkpointBuilder.completeCheckpoint(); + // Final validation round for the checkpoint before we propose it, just for safety + try { + validateCheckpoint(checkpoint, { + rollupManaLimit: this.l1Constants.rollupManaLimit, + maxL2BlockGas: this.config.maxL2BlockGas, + maxDABlockGas: this.config.maxDABlockGas, + }); + } catch (err) { + this.log.error(`Built an invalid checkpoint at slot ${this.slot} (skipping proposal)`, err, { + checkpoint: checkpoint.header.toInspect(), + }); + return undefined; + } + // Record checkpoint-level build metrics this.metrics.recordCheckpointBuild( checkpointBuildTimer.ms(), @@ -389,9 +401,6 @@ export class CheckpointProposalJob implements Traceable { const txHashesAlreadyIncluded = new Set(); const initialBlockNumber = 
BlockNumber(this.syncedToBlockNumber + 1); - // Remaining blob fields available for blocks (checkpoint end marker already subtracted) - let remainingBlobFields = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB - NUM_CHECKPOINT_END_MARKER_FIELDS; - // Last block in the checkpoint will usually be flagged as pending broadcast, so we send it along with the checkpoint proposal let blockPendingBroadcast: { block: L2Block; txs: Tx[] } | undefined = undefined; @@ -424,7 +433,6 @@ export class CheckpointProposalJob implements Traceable { blockNumber, indexWithinCheckpoint, txHashesAlreadyIncluded, - remainingBlobFields, }); // TODO(palla/mbps): Review these conditions. We may want to keep trying in some scenarios. @@ -450,12 +458,9 @@ export class CheckpointProposalJob implements Traceable { break; } - const { block, usedTxs, remainingBlobFields: newRemainingBlobFields } = buildResult; + const { block, usedTxs } = buildResult; blocksInCheckpoint.push(block); - // Update remaining blob fields for the next block - remainingBlobFields = newRemainingBlobFields; - // Sync the proposed block to the archiver to make it available // Note that the checkpoint builder uses its own fork so it should not need to wait for this syncing // Eventually we should refactor the checkpoint builder to not need a separate long-lived fork @@ -523,18 +528,10 @@ export class CheckpointProposalJob implements Traceable { indexWithinCheckpoint: IndexWithinCheckpoint; buildDeadline: Date | undefined; txHashesAlreadyIncluded: Set; - remainingBlobFields: number; }, - ): Promise<{ block: L2Block; usedTxs: Tx[]; remainingBlobFields: number } | { error: Error } | undefined> { - const { - blockTimestamp, - forceCreate, - blockNumber, - indexWithinCheckpoint, - buildDeadline, - txHashesAlreadyIncluded, - remainingBlobFields, - } = opts; + ): Promise<{ block: L2Block; usedTxs: Tx[] } | { error: Error } | undefined> { + const { blockTimestamp, forceCreate, blockNumber, indexWithinCheckpoint, buildDeadline, 
txHashesAlreadyIncluded } = + opts; this.log.verbose( `Preparing block ${blockNumber} index ${indexWithinCheckpoint} at checkpoint ${this.checkpointNumber} for slot ${this.slot}`, @@ -568,16 +565,16 @@ export class CheckpointProposalJob implements Traceable { ); this.setStateFn(SequencerState.CREATING_BLOCK, this.slot); - // Calculate blob fields limit for txs (remaining capacity - this block's end overhead) - const blockEndOverhead = getNumBlockEndBlobFields(indexWithinCheckpoint === 0); - const maxBlobFieldsForTxs = remainingBlobFields - blockEndOverhead; - + // Per-block limits derived at startup by SequencerClient.computeBlockGasLimits(), further capped + // by remaining checkpoint-level budgets inside CheckpointBuilder before each block is built. const blockBuilderOptions: PublicProcessorLimits = { maxTransactions: this.config.maxTxsPerBlock, - maxBlockSize: this.config.maxBlockSizeInBytes, - maxBlockGas: new Gas(this.config.maxDABlockGas, this.config.maxL2BlockGas), - maxBlobFields: maxBlobFieldsForTxs, + maxBlockGas: + this.config.maxL2BlockGas !== undefined || this.config.maxDABlockGas !== undefined + ? new Gas(this.config.maxDABlockGas ?? Infinity, this.config.maxL2BlockGas ?? 
Infinity) + : undefined, deadline: buildDeadline, + isBuildingProposal: true, }; // Actually build the block by executing txs @@ -607,7 +604,7 @@ export class CheckpointProposalJob implements Traceable { } // Block creation succeeded, emit stats and metrics - const { publicGas, block, publicProcessorDuration, usedTxs, usedTxBlobFields, blockBuildDuration } = buildResult; + const { block, publicProcessorDuration, usedTxs, blockBuildDuration } = buildResult; const blockStats = { eventName: 'l2-block-built', @@ -618,7 +615,7 @@ export class CheckpointProposalJob implements Traceable { const blockHash = await block.hash(); const txHashes = block.body.txEffects.map(tx => tx.txHash); - const manaPerSec = publicGas.l2Gas / (blockBuildDuration / 1000); + const manaPerSec = block.header.totalManaUsed.toNumberUnsafe() / (blockBuildDuration / 1000); this.log.info( `Built block ${block.number} at checkpoint ${this.checkpointNumber} for slot ${this.slot} with ${numTxs} txs`, @@ -626,9 +623,9 @@ export class CheckpointProposalJob implements Traceable { ); this.eventEmitter.emit('block-proposed', { blockNumber: block.number, slot: this.slot }); - this.metrics.recordBuiltBlock(blockBuildDuration, publicGas.l2Gas); + this.metrics.recordBuiltBlock(blockBuildDuration, block.header.totalManaUsed.toNumberUnsafe()); - return { block, usedTxs, remainingBlobFields: maxBlobFieldsForTxs - usedTxBlobFields }; + return { block, usedTxs }; } catch (err: any) { this.eventEmitter.emit('block-build-failed', { reason: err.message, slot: this.slot }); this.log.error(`Error building block`, err, { blockNumber, slot: this.slot }); diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts index 65ed41a5ae48..254485252ec5 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts +++ 
b/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts @@ -67,6 +67,7 @@ describe('CheckpointVoter HA Integration', () => { l1GenesisTime: 1n, slotDuration: 24, ethereumSlotDuration: DefaultL1ContractsConfig.ethereumSlotDuration, + rollupManaLimit: Infinity, }; /** diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index cb625f07002d..4f8e011c1c82 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -78,7 +78,9 @@ describe('sequencer', () => { let block: L2Block; let globalVariables: GlobalVariables; - let l1Constants: Pick; + let l1Constants: Pick & { + rollupManaLimit: number; + }; let sequencer: TestSequencer; @@ -160,7 +162,7 @@ describe('sequencer', () => { ); const l1GenesisTime = BigInt(Math.floor(Date.now() / 1000)); - l1Constants = { l1GenesisTime, slotDuration, ethereumSlotDuration }; + l1Constants = { l1GenesisTime, slotDuration, ethereumSlotDuration, rollupManaLimit: Infinity }; epochCache = mockDeep(); epochCache.isEscapeHatchOpen.mockResolvedValue(false); diff --git a/yarn-project/sequencer-client/src/sequencer/timetable.ts b/yarn-project/sequencer-client/src/sequencer/timetable.ts index 86b88a1ba99f..e692fb1a6159 100644 --- a/yarn-project/sequencer-client/src/sequencer/timetable.ts +++ b/yarn-project/sequencer-client/src/sequencer/timetable.ts @@ -1,4 +1,4 @@ -import { createLogger } from '@aztec/aztec.js/log'; +import type { Logger } from '@aztec/foundation/log'; import { CHECKPOINT_ASSEMBLE_TIME, CHECKPOINT_INITIALIZATION_TIME, @@ -80,7 +80,7 @@ export class SequencerTimetable { enforce: boolean; }, private readonly metrics?: SequencerMetrics, - private readonly log = createLogger('sequencer:timetable'), + private readonly log?: Logger, ) { this.ethereumSlotDuration = opts.ethereumSlotDuration; this.aztecSlotDuration = 
opts.aztecSlotDuration; @@ -132,7 +132,7 @@ export class SequencerTimetable { const initializeDeadline = this.aztecSlotDuration - minWorkToDo; this.initializeDeadline = initializeDeadline; - this.log.info( + this.log?.info( `Sequencer timetable initialized with ${this.maxNumberOfBlocks} blocks per slot (${this.enforce ? 'enforced' : 'not enforced'})`, { ethereumSlotDuration: this.ethereumSlotDuration, @@ -206,7 +206,7 @@ export class SequencerTimetable { } this.metrics?.recordStateTransitionBufferMs(Math.floor(bufferSeconds * 1000), newState); - this.log.trace(`Enough time to transition to ${newState}`, { maxAllowedTime, secondsIntoSlot }); + this.log?.trace(`Enough time to transition to ${newState}`, { maxAllowedTime, secondsIntoSlot }); } /** @@ -242,7 +242,7 @@ export class SequencerTimetable { const canStart = available >= this.minExecutionTime; const deadline = secondsIntoSlot + available; - this.log.verbose( + this.log?.verbose( `${canStart ? 'Can' : 'Cannot'} start single-block checkpoint at ${secondsIntoSlot}s into slot`, { secondsIntoSlot, maxAllowed, available, deadline }, ); @@ -262,7 +262,7 @@ export class SequencerTimetable { // Found an available sub-slot! Is this the last one? const isLastBlock = subSlot === this.maxNumberOfBlocks; - this.log.verbose( + this.log?.verbose( `Can start ${isLastBlock ? 
'last block' : 'block'} in sub-slot ${subSlot} with deadline ${deadline}s`, { secondsIntoSlot, deadline, timeUntilDeadline, subSlot, maxBlocks: this.maxNumberOfBlocks }, ); @@ -272,7 +272,7 @@ export class SequencerTimetable { } // No sub-slots available with enough time - this.log.verbose(`No time left to start any more blocks`, { + this.log?.verbose(`No time left to start any more blocks`, { secondsIntoSlot, maxBlocks: this.maxNumberOfBlocks, initializationOffset: this.initializationOffset, diff --git a/yarn-project/sequencer-client/src/sequencer/types.ts b/yarn-project/sequencer-client/src/sequencer/types.ts index ef4cebf699c2..312c9613cce5 100644 --- a/yarn-project/sequencer-client/src/sequencer/types.ts +++ b/yarn-project/sequencer-client/src/sequencer/types.ts @@ -3,4 +3,7 @@ import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; export type SequencerRollupConstants = Pick< L1RollupConstants, 'ethereumSlotDuration' | 'l1GenesisTime' | 'slotDuration' ->; +> & { + /** Total L2 gas (mana) allowed per checkpoint. Fetched from L1 getManaLimit(). 
*/ + rollupManaLimit: number; +}; diff --git a/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts b/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts index 60cc606570f8..9baf133dc1fc 100644 --- a/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts +++ b/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts @@ -2,7 +2,6 @@ import { type BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-ty import { Fr } from '@aztec/foundation/curves/bn254'; import { L2Block } from '@aztec/stdlib/block'; import { Checkpoint } from '@aztec/stdlib/checkpoint'; -import { Gas } from '@aztec/stdlib/gas'; import type { FullNodeBlockBuilderConfig, ICheckpointBlockBuilder, @@ -113,12 +112,10 @@ export class MockCheckpointBuilder implements ICheckpointBlockBuilder { return { block, - publicGas: Gas.empty(), publicProcessorDuration: 0, numTxs: block?.body?.txEffects?.length ?? usedTxs.length, usedTxs, failedTxs: [], - usedTxBlobFields: block?.body?.txEffects?.reduce((sum, tx) => sum + tx.getNumBlobFields(), 0) ?? 
0, }; } @@ -249,6 +246,7 @@ export class MockCheckpointsBuilder implements ICheckpointsBuilder { slotDuration: 24, l1ChainId: 1, rollupVersion: 1, + rollupManaLimit: 200_000_000, }; } diff --git a/yarn-project/simulator/src/public/fixtures/public_tx_simulation_tester.ts b/yarn-project/simulator/src/public/fixtures/public_tx_simulation_tester.ts index 4261d5881d52..839d073c0cdc 100644 --- a/yarn-project/simulator/src/public/fixtures/public_tx_simulation_tester.ts +++ b/yarn-project/simulator/src/public/fixtures/public_tx_simulation_tester.ts @@ -117,6 +117,7 @@ export class PublicTxSimulationTester extends BaseAvmSimulationTester { feePayer: AztecAddress = sender, /* need some unique first nullifier for note-nonce computations */ privateInsertions: TestPrivateInsertions = { nonRevertible: { nullifiers: [new Fr(420000 + this.txCount)] } }, + gasLimits?: Gas, ): Promise { const setupCallRequests = await asyncMap(setupCalls, call => this.#createPubicCallRequestForCall(call, call.sender ?? sender), @@ -142,6 +143,7 @@ export class PublicTxSimulationTester extends BaseAvmSimulationTester { ) : new Gas(TX_DA_GAS_OVERHEAD, PUBLIC_TX_L2_GAS_OVERHEAD), defaultGlobals(), + gasLimits, ); } @@ -154,8 +156,9 @@ export class PublicTxSimulationTester extends BaseAvmSimulationTester { /* need some unique first nullifier for note-nonce computations */ privateInsertions?: TestPrivateInsertions, txLabel: string = 'unlabeledTx', + gasLimits?: Gas, ): Promise { - const tx = await this.createTx(sender, setupCalls, appCalls, teardownCall, feePayer, privateInsertions); + const tx = await this.createTx(sender, setupCalls, appCalls, teardownCall, feePayer, privateInsertions, gasLimits); await this.setFeePayerBalance(feePayer); @@ -200,8 +203,18 @@ export class PublicTxSimulationTester extends BaseAvmSimulationTester { teardownCall?: TestEnqueuedCall, feePayer?: AztecAddress, privateInsertions?: TestPrivateInsertions, + gasLimits?: Gas, ): Promise { - return await this.simulateTx(sender, 
setupCalls, appCalls, teardownCall, feePayer, privateInsertions, txLabel); + return await this.simulateTx( + sender, + setupCalls, + appCalls, + teardownCall, + feePayer, + privateInsertions, + txLabel, + gasLimits, + ); } /** @@ -219,6 +232,7 @@ export class PublicTxSimulationTester extends BaseAvmSimulationTester { teardownCall?: TestEnqueuedCall, feePayer?: AztecAddress, privateInsertions?: TestPrivateInsertions, + gasLimits?: Gas, ): Promise { return await this.simulateTxWithLabel( txLabel, @@ -228,6 +242,7 @@ export class PublicTxSimulationTester extends BaseAvmSimulationTester { teardownCall, feePayer, privateInsertions, + gasLimits, ); } diff --git a/yarn-project/simulator/src/public/fixtures/utils.ts b/yarn-project/simulator/src/public/fixtures/utils.ts index c058c9d7c128..b32fb1ec30d6 100644 --- a/yarn-project/simulator/src/public/fixtures/utils.ts +++ b/yarn-project/simulator/src/public/fixtures/utils.ts @@ -62,13 +62,14 @@ export async function createTxForPublicCalls( feePayer = AztecAddress.zero(), gasUsedByPrivate: Gas = Gas.empty(), globals: GlobalVariables = GlobalVariables.empty(), + gasLimits?: Gas, ): Promise { assert( setupCallRequests.length > 0 || appCallRequests.length > 0 || teardownCallRequest !== undefined, "Can't create public tx with no enqueued calls", ); // use max limits - const gasLimits = new Gas(DEFAULT_DA_GAS_LIMIT, DEFAULT_L2_GAS_LIMIT); + gasLimits = gasLimits ?? 
new Gas(DEFAULT_DA_GAS_LIMIT, DEFAULT_L2_GAS_LIMIT); const forPublic = PartialPrivateTailPublicInputsForPublic.empty(); diff --git a/yarn-project/simulator/src/public/public_processor/public_processor.test.ts b/yarn-project/simulator/src/public/public_processor/public_processor.test.ts index 03276162b887..907ee1f907c6 100644 --- a/yarn-project/simulator/src/public/public_processor/public_processor.test.ts +++ b/yarn-project/simulator/src/public/public_processor/public_processor.test.ts @@ -188,6 +188,22 @@ describe('public_processor', () => { expect(failed).toEqual([]); }); + it('skips tx before processing if estimated blob fields would exceed limit', async function () { + const tx = await mockTxWithPublicCalls(); + // Add note hashes to inflate the estimated blob fields size + for (let i = 0; i < 10; i++) { + tx.data.forPublic!.nonRevertibleAccumulatedData.noteHashes[i] = Fr.random(); + } + // 3 overhead + 1 nullifier + 10 note hashes = 14 estimated fields + // Set a limit that is too small for even one tx + const [processed, failed] = await processor.process([tx], { maxBlobFields: 10, isBuildingProposal: true }); + + expect(processed).toEqual([]); + expect(failed).toEqual([]); + // The simulator should not have been called since the tx was skipped pre-processing + expect(publicTxSimulator.simulate).not.toHaveBeenCalled(); + }); + it('does not exceed max blob fields limit', async function () { // Create 3 private-only transactions const txs = await Promise.all(Array.from([1, 2, 3], seed => mockPrivateOnlyTx({ seed }))); @@ -201,16 +217,13 @@ describe('public_processor', () => { const maxBlobFields = actualBlobFields * 2; // Process all 3 transactions with the blob field limit - const [processed, failed, _usedTxs, _returns, usedTxBlobFields] = await processor.process(txs, { maxBlobFields }); + const [processed, failed] = await processor.process(txs, { maxBlobFields }); // Should only process 2 transactions due to blob field limit expect(processed.length).toBe(2); 
expect(processed[0].hash).toEqual(txs[0].getTxHash()); expect(processed[1].hash).toEqual(txs[1].getTxHash()); expect(failed).toEqual([]); - - const expectedBlobFields = actualBlobFields * 2; - expect(usedTxBlobFields).toBe(expectedBlobFields); }); it('does not send a transaction to the prover if pre validation fails', async function () { diff --git a/yarn-project/simulator/src/public/public_processor/public_processor.ts b/yarn-project/simulator/src/public/public_processor/public_processor.ts index e3a776edac02..45a3d9e6906e 100644 --- a/yarn-project/simulator/src/public/public_processor/public_processor.ts +++ b/yarn-project/simulator/src/public/public_processor/public_processor.ts @@ -160,8 +160,8 @@ export class PublicProcessor implements Traceable { txs: Iterable | AsyncIterable, limits: PublicProcessorLimits = {}, validator: PublicProcessorValidator = {}, - ): Promise<[ProcessedTx[], FailedTx[], Tx[], NestedProcessReturnValues[], number, DebugLog[]]> { - const { maxTransactions, maxBlockSize, deadline, maxBlockGas, maxBlobFields } = limits; + ): Promise<[ProcessedTx[], FailedTx[], Tx[], NestedProcessReturnValues[], DebugLog[]]> { + const { maxTransactions, deadline, maxBlockGas, maxBlobFields, isBuildingProposal } = limits; const { preprocessValidator, nullifierCache } = validator; const result: ProcessedTx[] = []; const usedTxs: Tx[] = []; @@ -188,22 +188,23 @@ export class PublicProcessor implements Traceable { break; } - // Skip this tx if it'd exceed max block size const txHash = tx.getTxHash().toString(); - const preTxSizeInBytes = tx.getEstimatedPrivateTxEffectsSize(); - if (maxBlockSize !== undefined && totalSizeInBytes + preTxSizeInBytes > maxBlockSize) { - this.log.warn(`Skipping processing of tx ${txHash} sized ${preTxSizeInBytes} bytes due to block size limit`, { - txHash, - sizeInBytes: preTxSizeInBytes, - totalSizeInBytes, - maxBlockSize, - }); + + // Skip this tx if its estimated blob fields would exceed the limit. 
+ // Only done during proposal building: during re-execution we must process the exact txs from the proposal. + const txBlobFields = tx.getPrivateTxEffectsSizeInFields(); + if (isBuildingProposal && maxBlobFields !== undefined && totalBlobFields + txBlobFields > maxBlobFields) { + this.log.warn( + `Skipping tx ${txHash} with ${txBlobFields} fields from private side effects due to blob fields limit`, + { txHash, txBlobFields, totalBlobFields, maxBlobFields }, + ); continue; } - // Skip this tx if its gas limit would exceed the block gas limit + // Skip this tx if its gas limit would exceed the block gas limit (either da or l2). + // Only done during proposal building: during re-execution we must process the exact txs from the proposal. const txGasLimit = tx.data.constants.txContext.gasSettings.gasLimits; - if (maxBlockGas !== undefined && totalBlockGas.add(txGasLimit).gtAny(maxBlockGas)) { + if (isBuildingProposal && maxBlockGas !== undefined && totalBlockGas.add(txGasLimit).gtAny(maxBlockGas)) { this.log.warn(`Skipping processing of tx ${txHash} due to block gas limit`, { txHash, txGasLimit, @@ -252,23 +253,9 @@ export class PublicProcessor implements Traceable { } const txBlobFields = processedTx.txEffect.getNumBlobFields(); - - // If the actual size of this tx would exceed block size, skip it const txSize = txBlobFields * Fr.SIZE_IN_BYTES; - if (maxBlockSize !== undefined && totalSizeInBytes + txSize > maxBlockSize) { - this.log.debug(`Skipping processed tx ${txHash} sized ${txSize} due to max block size.`, { - txHash, - sizeInBytes: txSize, - totalSizeInBytes, - maxBlockSize, - }); - // Need to revert the checkpoint here and don't go any further - await checkpoint.revert(); - this.contractsDB.revertCheckpoint(); - continue; - } - // If the actual blob fields of this tx would exceed the limit, skip it + // If the actual blob fields of this tx would exceed the limit, skip it. 
// Note: maxBlobFields already accounts for block end blob fields and previous blocks in checkpoint. if (maxBlobFields !== undefined && totalBlobFields + txBlobFields > maxBlobFields) { this.log.debug( @@ -286,6 +273,25 @@ export class PublicProcessor implements Traceable { continue; } + // During re-execution, check if the actual gas used by this tx would push the block over the gas limit. + // Unlike the proposal-building check (which uses declared gas limits pessimistically before processing), + // this uses actual gas and stops processing when the limit is exceeded. + if ( + !isBuildingProposal && + maxBlockGas !== undefined && + totalBlockGas.add(processedTx.gasUsed.totalGas).gtAny(maxBlockGas) + ) { + this.log.warn(`Stopping re-execution since tx ${txHash} would push block gas over limit`, { + txHash, + txGas: processedTx.gasUsed.totalGas, + totalBlockGas, + maxBlockGas, + }); + await checkpoint.revert(); + this.contractsDB.revertCheckpoint(); + break; + } + // FIXME(fcarreiro): it's ugly to have to notify the validator of nullifiers. // I'd rather pass the validators the processedTx as well and let them deal with it. 
nullifierCache?.addNullifiers(processedTx.txEffect.nullifiers.map(n => n.toBuffer())); @@ -368,7 +374,7 @@ export class PublicProcessor implements Traceable { totalSizeInBytes, }); - return [result, failed, usedTxs, returns, totalBlobFields, debugLogs]; + return [result, failed, usedTxs, returns, debugLogs]; } private async checkWorldStateUnchanged( diff --git a/yarn-project/stdlib/src/block/l2_block.ts b/yarn-project/stdlib/src/block/l2_block.ts index 15f037082a91..362a36f996a5 100644 --- a/yarn-project/stdlib/src/block/l2_block.ts +++ b/yarn-project/stdlib/src/block/l2_block.ts @@ -1,4 +1,5 @@ import { type BlockBlobData, encodeBlockBlobData } from '@aztec/blob-lib/encoding'; +import { DA_GAS_PER_FIELD } from '@aztec/constants'; import { BlockNumber, CheckpointNumber, @@ -221,4 +222,15 @@ export class L2Block { timestamp: this.header.globalVariables.timestamp, }; } + + /** + * Compute how much DA gas this block uses. + * + * @remarks This assumes DA gas is computed solely based on the number of blob fields in transactions. + * This may change in the future, but we cannot access the actual DA gas used in a block since it's not exposed + * in the L2BlockHeader, so we have to rely on recomputing it. 
+ */ + computeDAGasUsed(): number { + return this.body.txEffects.reduce((total, txEffect) => total + txEffect.getNumBlobFields(), 0) * DA_GAS_PER_FIELD; + } } diff --git a/yarn-project/stdlib/src/checkpoint/index.ts b/yarn-project/stdlib/src/checkpoint/index.ts index d86f88c87bbb..96c176e1d861 100644 --- a/yarn-project/stdlib/src/checkpoint/index.ts +++ b/yarn-project/stdlib/src/checkpoint/index.ts @@ -2,3 +2,4 @@ export * from './checkpoint.js'; export * from './checkpoint_data.js'; export * from './checkpoint_info.js'; export * from './published_checkpoint.js'; +export * from './validate.js'; diff --git a/yarn-project/stdlib/src/checkpoint/validate.ts b/yarn-project/stdlib/src/checkpoint/validate.ts new file mode 100644 index 000000000000..a89d9409f189 --- /dev/null +++ b/yarn-project/stdlib/src/checkpoint/validate.ts @@ -0,0 +1,114 @@ +import { BLOBS_PER_CHECKPOINT, FIELDS_PER_BLOB, MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT } from '@aztec/constants'; +import type { CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; +import { sum } from '@aztec/foundation/collection'; + +import type { Checkpoint } from './checkpoint.js'; + +export class CheckpointValidationError extends Error { + constructor( + message: string, + public readonly checkpointNumber: CheckpointNumber, + public readonly slot: SlotNumber, + ) { + super(message); + this.name = 'CheckpointValidationError'; + } +} + +/** + * Validates a checkpoint. Throws a CheckpointValidationError if any validation fails. 
+ * - Validates checkpoint blob field count against maxBlobFields limit + * - Validates total L2 gas used by checkpoint blocks against the Rollup contract mana limit + * - Validates total DA gas used by checkpoint blocks against MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT + * - Validates individual block L2 gas and DA gas against maxL2BlockGas and maxDABlockGas limits + */ +export function validateCheckpoint( + checkpoint: Checkpoint, + opts: { + rollupManaLimit: number; + maxL2BlockGas: number | undefined; + maxDABlockGas: number | undefined; + }, +): void { + validateCheckpointLimits(checkpoint, opts); + validateCheckpointBlocksGasLimits(checkpoint, opts); +} + +/** Validates checkpoint blocks gas limits */ +function validateCheckpointBlocksGasLimits( + checkpoint: Checkpoint, + opts: { + maxL2BlockGas: number | undefined; + maxDABlockGas: number | undefined; + }, +): void { + const { maxL2BlockGas, maxDABlockGas } = opts; + + if (maxL2BlockGas !== undefined) { + for (const block of checkpoint.blocks) { + const blockL2Gas = block.header.totalManaUsed.toNumber(); + if (blockL2Gas > maxL2BlockGas) { + throw new CheckpointValidationError( + `Block ${block.number} in checkpoint has L2 gas used ${blockL2Gas} exceeding limit of ${maxL2BlockGas}`, + checkpoint.number, + checkpoint.slot, + ); + } + } + } + + if (maxDABlockGas !== undefined) { + for (const block of checkpoint.blocks) { + const blockDAGas = block.computeDAGasUsed(); + if (blockDAGas > maxDABlockGas) { + throw new CheckpointValidationError( + `Block ${block.number} in checkpoint has DA gas used ${blockDAGas} exceeding limit of ${maxDABlockGas}`, + checkpoint.number, + checkpoint.slot, + ); + } + } + } +} + +/** Validates checkpoint max blob fields and gas limits */ +function validateCheckpointLimits( + checkpoint: Checkpoint, + opts: { + rollupManaLimit: number; + }, +): void { + const { rollupManaLimit } = opts; + + const maxBlobFields = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB; + const maxDAGas = 
MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; + + const checkpointMana = sum(checkpoint.blocks.map(block => block.header.totalManaUsed.toNumber())); + if (checkpointMana > rollupManaLimit) { + throw new CheckpointValidationError( + `Checkpoint mana cost ${checkpointMana} exceeds rollup limit of ${rollupManaLimit}`, + checkpoint.number, + checkpoint.slot, + ); + } + + const checkpointDAGas = sum(checkpoint.blocks.map(block => block.computeDAGasUsed())); + if (checkpointDAGas > maxDAGas) { + throw new CheckpointValidationError( + `Checkpoint DA gas cost ${checkpointDAGas} exceeds limit of ${maxDAGas}`, + checkpoint.number, + checkpoint.slot, + ); + } + + if (maxBlobFields !== undefined) { + const checkpointBlobFields = checkpoint.toBlobFields().length; + if (checkpointBlobFields > maxBlobFields) { + throw new CheckpointValidationError( + `Checkpoint blob field count ${checkpointBlobFields} exceeds limit of ${maxBlobFields}`, + checkpoint.number, + checkpoint.slot, + ); + } + } +} diff --git a/yarn-project/stdlib/src/interfaces/block-builder.ts b/yarn-project/stdlib/src/interfaces/block-builder.ts index f0c4eb780468..07a986ab2384 100644 --- a/yarn-project/stdlib/src/interfaces/block-builder.ts +++ b/yarn-project/stdlib/src/interfaces/block-builder.ts @@ -36,11 +36,16 @@ export interface IBlockFactory extends ProcessedTxHandler { } export interface PublicProcessorLimits { + /** Maximum number of txs to process. */ maxTransactions?: number; - maxBlockSize?: number; + /** L2 and DA gas limits. */ maxBlockGas?: Gas; + /** Maximum number of blob fields allowed. */ maxBlobFields?: number; + /** Deadline for processing the txs. Processor will stop as soon as it hits this time. */ deadline?: Date; + /** Whether this processor is building a proposal (as opposed to re-executing one). Skipping txs due to gas or blob limits is only done during proposal building. 
*/ + isBuildingProposal?: boolean; } export interface PublicProcessorValidator { @@ -52,8 +57,16 @@ export type FullNodeBlockBuilderConfig = Pick & Pick< SequencerConfig, - 'txPublicSetupAllowListExtend' | 'fakeProcessingDelayPerTxMs' | 'fakeThrowAfterProcessingTxCount' - >; + | 'txPublicSetupAllowListExtend' + | 'fakeProcessingDelayPerTxMs' + | 'fakeThrowAfterProcessingTxCount' + | 'maxTxsPerBlock' + | 'maxL2BlockGas' + | 'maxDABlockGas' + > & { + /** Total L2 gas (mana) allowed per checkpoint. Fetched from L1 getManaLimit(). */ + rollupManaLimit: number; + }; export const FullNodeBlockBuilderConfigKeys: (keyof FullNodeBlockBuilderConfig)[] = [ 'l1GenesisTime', @@ -63,6 +76,10 @@ export const FullNodeBlockBuilderConfigKeys: (keyof FullNodeBlockBuilderConfig)[ 'txPublicSetupAllowListExtend', 'fakeProcessingDelayPerTxMs', 'fakeThrowAfterProcessingTxCount', + 'maxTxsPerBlock', + 'maxL2BlockGas', + 'maxDABlockGas', + 'rollupManaLimit', ] as const; /** Thrown when no valid transactions are available to include in a block after processing, and this is not the first block in a checkpoint. */ @@ -76,12 +93,10 @@ export class NoValidTxsError extends Error { /** Result of building a block within a checkpoint. */ export type BuildBlockInCheckpointResult = { block: L2Block; - publicGas: Gas; publicProcessorDuration: number; numTxs: number; failedTxs: FailedTx[]; usedTxs: Tx[]; - usedTxBlobFields: number; }; /** Interface for building blocks within a checkpoint context. */ diff --git a/yarn-project/stdlib/src/interfaces/configs.ts b/yarn-project/stdlib/src/interfaces/configs.ts index 88c7db90d3eb..bb18db1ee5fc 100644 --- a/yarn-project/stdlib/src/interfaces/configs.ts +++ b/yarn-project/stdlib/src/interfaces/configs.ts @@ -23,6 +23,8 @@ export interface SequencerConfig { maxL2BlockGas?: number; /** The maximum DA block gas. */ maxDABlockGas?: number; + /** Per-block gas budget multiplier for both L2 and DA gas. Budget = (checkpointLimit / maxBlocks) * multiplier. 
*/ + gasPerBlockAllocationMultiplier?: number; /** Recipient of block reward. */ coinbase?: EthAddress; /** Address to receive fees. */ @@ -33,8 +35,6 @@ export interface SequencerConfig { acvmBinaryPath?: string; /** Additional entries to extend the default setup allow list. */ txPublicSetupAllowListExtend?: AllowedElement[]; - /** Max block size */ - maxBlockSizeInBytes?: number; /** Payload address to vote for */ governanceProposerPayload?: EthAddress; /** Whether to enforce the time table when building blocks */ @@ -90,12 +90,12 @@ export const SequencerConfigSchema = zodFor()( maxL2BlockGas: z.number().optional(), publishTxsWithProposals: z.boolean().optional(), maxDABlockGas: z.number().optional(), + gasPerBlockAllocationMultiplier: z.number().optional(), coinbase: schemas.EthAddress.optional(), feeRecipient: schemas.AztecAddress.optional(), acvmWorkingDirectory: z.string().optional(), acvmBinaryPath: z.string().optional(), txPublicSetupAllowListExtend: z.array(AllowedElementSchema).optional(), - maxBlockSizeInBytes: z.number().optional(), governanceProposerPayload: schemas.EthAddress.optional(), l1PublishingTime: z.number().optional(), enforceTimeTable: z.boolean().optional(), @@ -134,7 +134,10 @@ type SequencerConfigOptionalKeys = | 'l1PublishingTime' | 'txPublicSetupAllowListExtend' | 'minValidTxsPerBlock' - | 'minBlocksForCheckpoint'; + | 'minBlocksForCheckpoint' + | 'maxL2BlockGas' + | 'maxDABlockGas' + | 'gasPerBlockAllocationMultiplier'; export type ResolvedSequencerConfig = Prettify< Required> & Pick diff --git a/yarn-project/stdlib/src/kernel/private_kernel_tail_circuit_public_inputs.ts b/yarn-project/stdlib/src/kernel/private_kernel_tail_circuit_public_inputs.ts index b3b8b8f79f4a..3bf2c6787830 100644 --- a/yarn-project/stdlib/src/kernel/private_kernel_tail_circuit_public_inputs.ts +++ b/yarn-project/stdlib/src/kernel/private_kernel_tail_circuit_public_inputs.ts @@ -234,6 +234,15 @@ export class PrivateKernelTailCircuitPublicInputs { return 
noteHashes.filter(n => !n.isZero()); } + getNonEmptyL2ToL1Msgs() { + const l2ToL1Msgs = this.forPublic + ? this.forPublic.nonRevertibleAccumulatedData.l2ToL1Msgs.concat( + this.forPublic.revertibleAccumulatedData.l2ToL1Msgs, + ) + : this.forRollup!.end.l2ToL1Msgs; + return l2ToL1Msgs.filter(m => !m.isEmpty()); + } + getNonEmptyNullifiers() { const nullifiers = this.forPublic ? this.forPublic.nonRevertibleAccumulatedData.nullifiers.concat( diff --git a/yarn-project/stdlib/src/tests/mocks.ts b/yarn-project/stdlib/src/tests/mocks.ts index ceffb21c01a8..79d33955c3d6 100644 --- a/yarn-project/stdlib/src/tests/mocks.ts +++ b/yarn-project/stdlib/src/tests/mocks.ts @@ -98,6 +98,7 @@ export const mockTx = async ( publicCalldataSize = 2, feePayer, chonkProof = ChonkProof.random(), + gasLimits, maxFeesPerGas = new GasFees(10, 10), maxPriorityFeesPerGas, gasUsed = Gas.empty(), @@ -114,6 +115,7 @@ export const mockTx = async ( publicCalldataSize?: number; feePayer?: AztecAddress; chonkProof?: ChonkProof; + gasLimits?: Gas; maxFeesPerGas?: GasFees; maxPriorityFeesPerGas?: GasFees; gasUsed?: Gas; @@ -132,7 +134,7 @@ export const mockTx = async ( const data = PrivateKernelTailCircuitPublicInputs.empty(); const firstNullifier = new Nullifier(new Fr(seed + 1), Fr.ZERO, 0); data.constants.anchorBlockHeader = anchorBlockHeader; - data.constants.txContext.gasSettings = GasSettings.default({ maxFeesPerGas, maxPriorityFeesPerGas }); + data.constants.txContext.gasSettings = GasSettings.default({ gasLimits, maxFeesPerGas, maxPriorityFeesPerGas }); data.feePayer = feePayer ?? 
(await AztecAddress.random()); data.gasUsed = gasUsed; data.constants.txContext.chainId = chainId; diff --git a/yarn-project/stdlib/src/tx/tx.test.ts b/yarn-project/stdlib/src/tx/tx.test.ts index 500178d46be8..8dc3affa5880 100644 --- a/yarn-project/stdlib/src/tx/tx.test.ts +++ b/yarn-project/stdlib/src/tx/tx.test.ts @@ -1,6 +1,15 @@ +import { PRIVATE_LOG_SIZE_IN_FIELDS } from '@aztec/constants'; +import { makeTuple } from '@aztec/foundation/array'; import { randomBytes } from '@aztec/foundation/crypto/random'; +import { Fr } from '@aztec/foundation/curves/bn254'; +import { EthAddress } from '@aztec/foundation/eth-address'; import { jsonStringify } from '@aztec/foundation/json-rpc'; +import { AztecAddress } from '../aztec-address/index.js'; +import { LogHash, ScopedLogHash } from '../kernel/log_hash.js'; +import { PrivateKernelTailCircuitPublicInputs } from '../kernel/private_kernel_tail_circuit_public_inputs.js'; +import { PrivateLog } from '../logs/private_log.js'; +import { L2ToL1Message, ScopedL2ToL1Message } from '../messaging/l2_to_l1_message.js'; import { mockTx } from '../tests/mocks.js'; import { Tx, TxArray } from './tx.js'; @@ -16,6 +25,105 @@ describe('Tx', () => { const json = jsonStringify(tx); expect(await Tx.schema.parseAsync(JSON.parse(json))).toEqual(tx); }); + + describe('getPrivateTxEffectsSizeInFields', () => { + function makePrivateOnlyTx() { + const data = PrivateKernelTailCircuitPublicInputs.emptyWithNullifier(); + return Tx.from({ + txHash: Tx.random().txHash, + data, + chonkProof: Tx.random().chonkProof, + contractClassLogFields: [], + publicFunctionCalldata: [], + }); + } + + const someAddress = AztecAddress.fromField(new Fr(27)); + + it('returns overhead only for tx with just a nullifier', () => { + const tx = makePrivateOnlyTx(); + // 3 fields overhead + 1 nullifier (from emptyWithNullifier) + expect(tx.getPrivateTxEffectsSizeInFields()).toBe(3 + 1); + }); + + it('counts note hashes', () => { + const tx = makePrivateOnlyTx(); + const end 
= tx.data.forRollup!.end; + end.noteHashes[0] = Fr.random(); + end.noteHashes[1] = Fr.random(); + // 3 overhead + 1 nullifier + 2 note hashes + expect(tx.getPrivateTxEffectsSizeInFields()).toBe(3 + 1 + 2); + }); + + it('counts nullifiers', () => { + const tx = makePrivateOnlyTx(); + const end = tx.data.forRollup!.end; + end.nullifiers[1] = Fr.random(); + end.nullifiers[2] = Fr.random(); + // 3 overhead + 3 nullifiers (1 from emptyWithNullifier + 2 new) + expect(tx.getPrivateTxEffectsSizeInFields()).toBe(3 + 3); + }); + + it('counts L2 to L1 messages', () => { + const tx = makePrivateOnlyTx(); + const end = tx.data.forRollup!.end; + end.l2ToL1Msgs[0] = new ScopedL2ToL1Message(new L2ToL1Message(EthAddress.random(), Fr.random()), someAddress); + // 3 overhead + 1 nullifier + 1 L2-to-L1 message + expect(tx.getPrivateTxEffectsSizeInFields()).toBe(3 + 1 + 1); + }); + + it('counts private logs with length field', () => { + const tx = makePrivateOnlyTx(); + const end = tx.data.forRollup!.end; + const emittedLength = 5; + end.privateLogs[0] = new PrivateLog(makeTuple(PRIVATE_LOG_SIZE_IN_FIELDS, Fr.random), emittedLength); + // 3 overhead + 1 nullifier + (5 content + 1 length field) + expect(tx.getPrivateTxEffectsSizeInFields()).toBe(3 + 1 + 6); + }); + + it('counts contract class logs with contract address field', () => { + const tx = makePrivateOnlyTx(); + const end = tx.data.forRollup!.end; + const logLength = 10; + end.contractClassLogsHashes[0] = new ScopedLogHash(new LogHash(Fr.random(), logLength), someAddress); + // 3 overhead + 1 nullifier + (10 content + 1 contract address) + expect(tx.getPrivateTxEffectsSizeInFields()).toBe(3 + 1 + 11); + }); + + it('counts all side effects together', () => { + const tx = makePrivateOnlyTx(); + const end = tx.data.forRollup!.end; + + // 2 additional nullifiers (1 already from emptyWithNullifier) + end.nullifiers[1] = Fr.random(); + end.nullifiers[2] = Fr.random(); + + // 3 note hashes + end.noteHashes[0] = Fr.random(); + 
end.noteHashes[1] = Fr.random(); + end.noteHashes[2] = Fr.random(); + + // 1 L2-to-L1 message + end.l2ToL1Msgs[0] = new ScopedL2ToL1Message(new L2ToL1Message(EthAddress.random(), Fr.random()), someAddress); + + // 2 private logs with different lengths + end.privateLogs[0] = new PrivateLog(makeTuple(PRIVATE_LOG_SIZE_IN_FIELDS, Fr.random), 4); + end.privateLogs[1] = new PrivateLog(makeTuple(PRIVATE_LOG_SIZE_IN_FIELDS, Fr.random), 7); + + // 1 contract class log + end.contractClassLogsHashes[0] = new ScopedLogHash(new LogHash(Fr.random(), 12), someAddress); + + const expected = + 3 + // overhead + 3 + // note hashes + 3 + // nullifiers + 1 + // L2-to-L1 messages + (4 + 1) + // first private log (content + length) + (7 + 1) + // second private log (content + length) + (12 + 1); // contract class log (content + contract address) + expect(tx.getPrivateTxEffectsSizeInFields()).toBe(expected); + }); + }); }); describe('TxArray', () => { diff --git a/yarn-project/stdlib/src/tx/tx.ts b/yarn-project/stdlib/src/tx/tx.ts index 8bbb1cc9c538..6ff10f6372bd 100644 --- a/yarn-project/stdlib/src/tx/tx.ts +++ b/yarn-project/stdlib/src/tx/tx.ts @@ -1,8 +1,9 @@ +import { DA_GAS_PER_FIELD, TX_DA_GAS_OVERHEAD } from '@aztec/constants'; import { Buffer32 } from '@aztec/foundation/buffer'; import { Fr } from '@aztec/foundation/curves/bn254'; import type { ZodFor } from '@aztec/foundation/schemas'; import { BufferReader, serializeArrayOfBufferableToVector, serializeToBuffer } from '@aztec/foundation/serialize'; -import type { FieldsOf } from '@aztec/foundation/types'; +import { type FieldsOf, unfreeze } from '@aztec/foundation/types'; import { z } from 'zod'; @@ -264,16 +265,24 @@ export class Tx extends Gossipable { } /** - * Estimates the tx size based on its private effects. Note that the actual size of the tx - * after processing will probably be larger, as public execution would generate more data. 
+ * Returns the number of fields this tx's effects will occupy in the blob, + * based on its private side effects only. Accurate for txs without public calls. + * For txs with public calls, the actual size will be larger due to public execution outputs. */ - getEstimatedPrivateTxEffectsSize() { - return ( - this.data.getNonEmptyNoteHashes().length * Fr.SIZE_IN_BYTES + - this.data.getNonEmptyNullifiers().length * Fr.SIZE_IN_BYTES + - this.data.getEmittedPrivateLogsLength() * Fr.SIZE_IN_BYTES + - this.data.getEmittedContractClassLogsLength() * Fr.SIZE_IN_BYTES - ); + getPrivateTxEffectsSizeInFields(): number { + // 3 fields overhead: tx_start_marker, tx_hash, tx_fee. + // TX_DA_GAS_OVERHEAD is defined as N * DA_GAS_PER_FIELD, so this division is always exact. + const overheadFields = TX_DA_GAS_OVERHEAD / DA_GAS_PER_FIELD; + const noteHashes = this.data.getNonEmptyNoteHashes().length; + const nullifiers = this.data.getNonEmptyNullifiers().length; + const l2ToL1Msgs = this.data.getNonEmptyL2ToL1Msgs().length; + // Each private log occupies (emittedLength + 1) fields: content + length field + const privateLogFields = this.data.getNonEmptyPrivateLogs().reduce((acc, log) => acc + log.emittedLength + 1, 0); + // Each contract class log occupies (length + 1) fields: content + contract address + const contractClassLogFields = this.data + .getNonEmptyContractClassLogsHashes() + .reduce((acc, log) => acc + log.logHash.length + 1, 0); + return overheadFields + noteHashes + nullifiers + l2ToL1Msgs + privateLogFields + contractClassLogFields; } /** @@ -309,7 +318,7 @@ export class Tx extends Gossipable { /** Recomputes the tx hash. Used for testing purposes only when a property of the tx was mutated. 
*/ public async recomputeHash(): Promise { - (this as any).txHash = await Tx.computeTxHash(this); + unfreeze(this).txHash = await Tx.computeTxHash(this); return this.txHash; } diff --git a/yarn-project/validator-client/README.md b/yarn-project/validator-client/README.md index bb232bc28184..9c25164b0c7c 100644 --- a/yarn-project/validator-client/README.md +++ b/yarn-project/validator-client/README.md @@ -223,6 +223,42 @@ This is useful for monitoring network health without participating in consensus. - `createCheckpointProposal(...)` → `CheckpointProposal`: Signs checkpoint proposal - `attestToCheckpointProposal(proposal, attestors)` → `CheckpointAttestation[]`: Creates attestations for given addresses +## Block Building Limits + +L1 enforces gas and blob capacity per checkpoint. The node enforces these during block building to avoid L1 rejection. Three dimensions are metered: L2 gas (mana), DA gas, and blob fields. DA gas maps to blob fields today (`daGas = blobFields * 32`) but both are tracked independently. + +### Checkpoint limits + +| Dimension | Source | Budget | +| --- | --- | --- | +| L2 gas (mana) | `rollup.getManaLimit()` | Fetched from L1 at startup | +| DA gas | `MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT` | 786,432 (6 blobs × 4096 fields × 32 gas/field) | +| Blob fields | `BLOBS_PER_CHECKPOINT × FIELDS_PER_BLOB` | 24,576 minus checkpoint/block-end overhead | + +### Per-block budgets + +Per-block budgets prevent one block from consuming the entire checkpoint budget. + +**Proposer**: `SequencerClient.computeBlockGasLimits()` derives budgets at startup as `min(checkpointLimit, ceil(checkpointLimit / maxBlocks * multiplier))`, where `maxBlocks` comes from the timetable and `multiplier` defaults to 2. The multiplier greater than 1 allows early blocks to use more than their even share of the checkpoint budget, since different blocks hit different limit dimensions (L2 gas, DA gas, blob fields) — a strict even split would waste capacity. 
Operators can override via `SEQ_MAX_L2_BLOCK_GAS` / `SEQ_MAX_DA_BLOCK_GAS` (capped at checkpoint limits). + +**Validator**: Does not enforce per-block gas budgets. Only checkpoint-level limits are checked, so that proposers can freely distribute capacity across blocks within a checkpoint. + +**Checkpoint-level capping**: `CheckpointBuilder.capLimitsByCheckpointBudgets()` always runs before tx processing, capping per-block limits by `checkpointBudget - sum(used by prior blocks)` for all three dimensions. This applies to both proposer and validator paths. + +### Per-transaction enforcement + +**Mempool entry** (`GasLimitsValidator`): L2 gas must be ≤ `MAX_PROCESSABLE_L2_GAS` (6,540,000) and ≥ fixed minimums. + +**Block building** (`PublicProcessor.process`): Before processing, txs are skipped if their estimated blob fields or gas limits would exceed the block budget. After processing, actual values are checked and the tx is reverted if limits are exceeded. + +### Gas limit configuration + +| Variable | Default | Description | +| --- | --- | --- | +| `SEQ_MAX_L2_BLOCK_GAS` | *auto* | Per-block L2 gas. Auto-derived from `rollupManaLimit / maxBlocks * multiplier`. | +| `SEQ_MAX_DA_BLOCK_GAS` | *auto* | Per-block DA gas. Auto-derived from checkpoint DA limit / maxBlocks * multiplier. | +| `SEQ_GAS_PER_BLOCK_ALLOCATION_MULTIPLIER` | 2 | Multiplier for per-block budget computation. 
| + ## Testing Patterns ### Common Mocks diff --git a/yarn-project/validator-client/src/checkpoint_builder.test.ts b/yarn-project/validator-client/src/checkpoint_builder.test.ts index abf782d6b8ea..19f307fa5a0b 100644 --- a/yarn-project/validator-client/src/checkpoint_builder.test.ts +++ b/yarn-project/validator-client/src/checkpoint_builder.test.ts @@ -1,3 +1,10 @@ +import { NUM_CHECKPOINT_END_MARKER_FIELDS, getNumBlockEndBlobFields } from '@aztec/blob-lib/encoding'; +import { + BLOBS_PER_CHECKPOINT, + DA_GAS_PER_FIELD, + FIELDS_PER_BLOB, + MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT, +} from '@aztec/constants'; import { BlockNumber, CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; import { EthAddress } from '@aztec/foundation/eth-address'; @@ -12,6 +19,7 @@ import { type FullNodeBlockBuilderConfig, type MerkleTreeWriteOperations, NoValidTxsError, + type PublicProcessorLimits, type PublicProcessorValidator, } from '@aztec/stdlib/interfaces/server'; import type { CheckpointGlobalVariables, GlobalVariables, ProcessedTx, Tx } from '@aztec/stdlib/tx'; @@ -51,26 +59,34 @@ describe('CheckpointBuilder', () => { public override makeBlockBuilderDeps(_globalVariables: GlobalVariables, _fork: MerkleTreeWriteOperations) { return Promise.resolve({ processor, validator }); } + + /** Expose for testing */ + public testCapLimits(opts: PublicProcessorLimits) { + return this.capLimitsByCheckpointBudgets(opts); + } } - beforeEach(() => { - lightweightCheckpointBuilder = mock({ checkpointNumber, constants }); + /** Creates a mock block with the given mana, tx blob fields, and total block blob fields. 
*/ + function createMockBlock(opts: { manaUsed: number; txBlobFields: number[]; blockBlobFieldCount: number }) { + return { + header: { totalManaUsed: { toNumber: () => opts.manaUsed } }, + body: { + txEffects: opts.txBlobFields.map(n => ({ getNumBlobFields: () => n })), + }, + toBlobFields: () => new Array(opts.blockBlobFieldCount).fill(Fr.ZERO), + computeDAGasUsed: () => opts.txBlobFields.reduce((total, n) => total + n, 0) * DA_GAS_PER_FIELD, + } as unknown as L2Block; + } - fork = mock(); + function setupBuilder(overrideConfig?: Partial) { config = { l1GenesisTime: 0n, slotDuration: 24, l1ChainId: 1, rollupVersion: 1, + rollupManaLimit: 200_000_000, + ...overrideConfig, }; - contractDataSource = mock(); - dateProvider = new TestDateProvider(); - telemetryClient = mock(); - telemetryClient.getMeter.mockReturnValue(mock()); - telemetryClient.getTracer.mockReturnValue(mock()); - - processor = mock(); - validator = mock(); checkpointBuilder = new TestCheckpointBuilder( lightweightCheckpointBuilder as unknown as LightweightCheckpointBuilder, @@ -80,6 +96,23 @@ describe('CheckpointBuilder', () => { dateProvider, telemetryClient, ); + } + + beforeEach(() => { + lightweightCheckpointBuilder = mock({ checkpointNumber, constants }); + lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); + + fork = mock(); + contractDataSource = mock(); + dateProvider = new TestDateProvider(); + telemetryClient = mock(); + telemetryClient.getMeter.mockReturnValue(mock()); + telemetryClient.getTracer.mockReturnValue(mock()); + + processor = mock(); + validator = mock(); + + setupBuilder(); }); describe('buildBlock', () => { @@ -90,11 +123,10 @@ describe('CheckpointBuilder', () => { lightweightCheckpointBuilder.addBlock.mockResolvedValue({ block: expectedBlock, timings: {} }); processor.process.mockResolvedValue([ - [{ hash: Fr.random(), gasUsed: { publicGas: Gas.empty() } } as unknown as ProcessedTx], + [{ hash: Fr.random() } as unknown as ProcessedTx], [], // failedTxs [], // 
usedTxs [], // returnValues - 0, // usedTxBlobFields [], // debugLogs ]); @@ -118,7 +150,6 @@ describe('CheckpointBuilder', () => { [], // failedTxs [], // usedTxs [], // returnValues - 0, // usedTxBlobFields [], // debugLogs ]); @@ -138,7 +169,6 @@ describe('CheckpointBuilder', () => { [failedTx], // failedTxs [], // usedTxs [], // returnValues - 0, // usedTxBlobFields [], // debugLogs ]); @@ -147,4 +177,173 @@ describe('CheckpointBuilder', () => { expect(lightweightCheckpointBuilder.addBlock).not.toHaveBeenCalled(); }); }); + + describe('capLimitsByCheckpointBudgets', () => { + const totalBlobCapacity = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB - NUM_CHECKPOINT_END_MARKER_FIELDS; + const firstBlockEndOverhead = getNumBlockEndBlobFields(true); + const nonFirstBlockEndOverhead = getNumBlockEndBlobFields(false); + + it('caps L2 gas by remaining checkpoint mana', () => { + const rollupManaLimit = 1_000_000; + const priorManaUsed = 600_000; + setupBuilder({ rollupManaLimit }); + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: priorManaUsed, txBlobFields: [10], blockBlobFieldCount: 20 }), + ]); + + const opts: PublicProcessorLimits = { maxBlockGas: new Gas(Infinity, 800_000) }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining mana = 1_000_000 - 600_000 = 400_000. Per-block = 800_000. Capped to 400_000. 
+ expect(capped.maxBlockGas!.l2Gas).toBe(400_000); + }); + + it('uses per-block L2 gas limit when tighter than remaining mana', () => { + const rollupManaLimit = 1_000_000; + const priorManaUsed = 200_000; + setupBuilder({ rollupManaLimit }); + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: priorManaUsed, txBlobFields: [10], blockBlobFieldCount: 20 }), + ]); + + const opts: PublicProcessorLimits = { maxBlockGas: new Gas(Infinity, 500_000) }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining mana = 800_000. Per-block = 500_000. Uses 500_000. + expect(capped.maxBlockGas!.l2Gas).toBe(500_000); + }); + + it('uses per-block L2 gas limit when remaining mana is larger', () => { + setupBuilder(); // rollupManaLimit defaults to 200_000_000 + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 100_000, txBlobFields: [10], blockBlobFieldCount: 20 }), + ]); + + const opts: PublicProcessorLimits = { maxBlockGas: new Gas(Infinity, 500_000) }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining mana = 200_000_000 - 100_000 >> 500_000, so per-block limit is used + expect(capped.maxBlockGas!.l2Gas).toBe(500_000); + }); + + it('caps DA gas by remaining checkpoint DA gas budget', () => { + // Each prior tx blob field = DA_GAS_PER_FIELD DA gas + const txBlobFields = [1000]; // 1000 fields * 32 = 32000 DA gas + const priorDAGas = 1000 * DA_GAS_PER_FIELD; + setupBuilder(); + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 0, txBlobFields, blockBlobFieldCount: 1010 }), + ]); + + const perBlockDAGas = 500_000; + const opts: PublicProcessorLimits = { maxBlockGas: new Gas(perBlockDAGas, Infinity) }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining DA gas = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT - priorDAGas + const 
expectedRemainingDAGas = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT - priorDAGas; + expect(capped.maxBlockGas!.daGas).toBe(Math.min(perBlockDAGas, expectedRemainingDAGas)); + }); + + it('sets maxBlockGas from remaining budgets when caller does not provide it', () => { + const rollupManaLimit = 1_000_000; + const priorManaUsed = 600_000; + setupBuilder({ rollupManaLimit }); + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: priorManaUsed, txBlobFields: [100], blockBlobFieldCount: 110 }), + ]); + + const opts: PublicProcessorLimits = {}; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + expect(capped.maxBlockGas!.l2Gas).toBe(400_000); + expect(capped.maxBlockGas!.daGas).toBe(MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT - 100 * DA_GAS_PER_FIELD); + }); + + it('caps blob fields by remaining checkpoint blob capacity', () => { + const blockBlobFieldCount = 100; // Prior block used 100 blob fields + setupBuilder(); + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 0, txBlobFields: [], blockBlobFieldCount }), + ]); + + const opts: PublicProcessorLimits = { maxBlobFields: 99999 }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Second block: remaining = totalBlobCapacity - 100, minus non-first block end overhead + const expectedMaxBlobFields = totalBlobCapacity - blockBlobFieldCount - nonFirstBlockEndOverhead; + expect(capped.maxBlobFields).toBe(expectedMaxBlobFields); + }); + + it('sets blob fields from remaining capacity when caller does not set them', () => { + setupBuilder(); + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); + + const opts: PublicProcessorLimits = {}; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // First block: full capacity minus first block end overhead + const expectedMaxBlobFields = totalBlobCapacity - firstBlockEndOverhead; + 
expect(capped.maxBlobFields).toBe(expectedMaxBlobFields); + }); + + it('accumulates limits across multiple prior blocks', () => { + const rollupManaLimit = 1_000_000; + setupBuilder({ rollupManaLimit }); + + const block1 = createMockBlock({ manaUsed: 300_000, txBlobFields: [200], blockBlobFieldCount: 210 }); + const block2 = createMockBlock({ manaUsed: 200_000, txBlobFields: [150], blockBlobFieldCount: 160 }); + lightweightCheckpointBuilder.getBlocks.mockReturnValue([block1, block2]); + + const opts: PublicProcessorLimits = { maxBlockGas: new Gas(Infinity, Infinity) }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining mana = 1_000_000 - 300_000 - 200_000 = 500_000 + expect(capped.maxBlockGas!.l2Gas).toBe(500_000); + + // Remaining DA gas = MAX - (200 + 150) * DA_GAS_PER_FIELD + const expectedRemainingDAGas = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT - (200 + 150) * DA_GAS_PER_FIELD; + expect(capped.maxBlockGas!.daGas).toBe(expectedRemainingDAGas); + + // Remaining blob fields = capacity - 210 - 160 - nonFirstBlockEndOverhead + const expectedBlobFields = totalBlobCapacity - 210 - 160 - nonFirstBlockEndOverhead; + expect(capped.maxBlobFields).toBe(expectedBlobFields); + }); + + it('tracks remaining blob field capacity across multiple blocks', () => { + setupBuilder(); + + const block1BlobFieldCount = 200; + const block2BlobFieldCount = 150; + + // After one block has been built, remaining capacity should account for that block's usage + const block1 = createMockBlock({ manaUsed: 0, txBlobFields: [], blockBlobFieldCount: block1BlobFieldCount }); + lightweightCheckpointBuilder.getBlocks.mockReturnValue([block1]); + + const afterOneBlock = (checkpointBuilder as TestCheckpointBuilder).testCapLimits({}); + + const expectedAfterOneBlock = totalBlobCapacity - block1BlobFieldCount - nonFirstBlockEndOverhead; + expect(afterOneBlock.maxBlobFields).toBe(expectedAfterOneBlock); + + // After two blocks have been built, remaining 
capacity should further decrease + const block2 = createMockBlock({ manaUsed: 0, txBlobFields: [], blockBlobFieldCount: block2BlobFieldCount }); + lightweightCheckpointBuilder.getBlocks.mockReturnValue([block1, block2]); + + const afterTwoBlocks = (checkpointBuilder as TestCheckpointBuilder).testCapLimits({}); + + const expectedAfterTwoBlocks = + totalBlobCapacity - block1BlobFieldCount - block2BlobFieldCount - nonFirstBlockEndOverhead; + expect(afterTwoBlocks.maxBlobFields).toBe(expectedAfterTwoBlocks); + + // Verify the limit actually decreased between calls + expect(afterTwoBlocks.maxBlobFields).toBeLessThan(afterOneBlock.maxBlobFields!); + expect(afterOneBlock.maxBlobFields! - afterTwoBlocks.maxBlobFields!).toBe(block2BlobFieldCount); + }); + }); }); diff --git a/yarn-project/validator-client/src/checkpoint_builder.ts b/yarn-project/validator-client/src/checkpoint_builder.ts index 74059c27ce35..3c387b1da257 100644 --- a/yarn-project/validator-client/src/checkpoint_builder.ts +++ b/yarn-project/validator-client/src/checkpoint_builder.ts @@ -1,5 +1,7 @@ +import { NUM_CHECKPOINT_END_MARKER_FIELDS, getNumBlockEndBlobFields } from '@aztec/blob-lib/encoding'; +import { BLOBS_PER_CHECKPOINT, FIELDS_PER_BLOB, MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT } from '@aztec/constants'; import { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; -import { merge, pick } from '@aztec/foundation/collection'; +import { merge, pick, sum } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log'; import { bufferToHex } from '@aztec/foundation/string'; @@ -65,6 +67,7 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { /** * Builds a single block within this checkpoint. + * Automatically caps gas and blob field limits based on checkpoint-level budgets and prior blocks. 
*/ async buildBlock( pendingTxs: Iterable | AsyncIterable, @@ -94,8 +97,14 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { }); const { processor, validator } = await this.makeBlockBuilderDeps(globalVariables, this.fork); - const [publicProcessorDuration, [processedTxs, failedTxs, usedTxs, _, usedTxBlobFields]] = await elapsed(() => - processor.process(pendingTxs, opts, validator), + // Cap gas limits amd available blob fields by remaining checkpoint-level budgets + const cappedOpts: PublicProcessorLimits & { expectedEndState?: StateReference } = { + ...opts, + ...this.capLimitsByCheckpointBudgets(opts), + }; + + const [publicProcessorDuration, [processedTxs, failedTxs, usedTxs]] = await elapsed(() => + processor.process(pendingTxs, cappedOpts, validator), ); // Throw if we didn't collect a single valid tx and we're not allowed to build empty blocks @@ -109,9 +118,6 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { expectedEndState: opts.expectedEndState, }); - // How much public gas was processed - const publicGas = processedTxs.reduce((acc, tx) => acc.add(tx.gasUsed.publicGas), Gas.empty()); - this.log.debug('Built block within checkpoint', { header: block.header.toInspect(), processedTxs: processedTxs.map(tx => tx.hash.toString()), @@ -120,12 +126,10 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { return { block, - publicGas, publicProcessorDuration, numTxs: processedTxs.length, failedTxs, usedTxs, - usedTxBlobFields, }; } @@ -147,6 +151,49 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { return this.checkpointBuilder.clone().completeCheckpoint(); } + /** + * Caps per-block gas and blob field limits by remaining checkpoint-level budgets. + * Computes remaining L2 gas (mana), DA gas, and blob fields from blocks already added to the checkpoint, + * then returns opts with maxBlockGas and maxBlobFields capped accordingly. 
+ */ + protected capLimitsByCheckpointBudgets( + opts: PublicProcessorLimits, + ): Pick { + const existingBlocks = this.checkpointBuilder.getBlocks(); + + // Remaining L2 gas (mana) + // IMPORTANT: This assumes mana is computed solely based on L2 gas used in transactions. + // This may change in the future. + const usedMana = sum(existingBlocks.map(b => b.header.totalManaUsed.toNumber())); + const remainingMana = this.config.rollupManaLimit - usedMana; + + // Remaining DA gas + const usedDAGas = sum(existingBlocks.map(b => b.computeDAGasUsed())) ?? 0; + const remainingDAGas = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT - usedDAGas; + + // Remaining blob fields (block blob fields include both tx data and block-end overhead) + const usedBlobFields = sum(existingBlocks.map(b => b.toBlobFields().length)); + const totalBlobCapacity = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB - NUM_CHECKPOINT_END_MARKER_FIELDS; + const isFirstBlock = existingBlocks.length === 0; + const blockEndOverhead = getNumBlockEndBlobFields(isFirstBlock); + const maxBlobFieldsForTxs = totalBlobCapacity - usedBlobFields - blockEndOverhead; + + // Cap L2 gas by remaining checkpoint mana + const cappedL2Gas = Math.min(opts.maxBlockGas?.l2Gas ?? remainingMana, remainingMana); + + // Cap DA gas by remaining checkpoint DA gas budget + const cappedDAGas = Math.min(opts.maxBlockGas?.daGas ?? remainingDAGas, remainingDAGas); + + // Cap blob fields by remaining checkpoint blob capacity + const cappedBlobFields = + opts.maxBlobFields !== undefined ? 
Math.min(opts.maxBlobFields, maxBlobFieldsForTxs) : maxBlobFieldsForTxs; + + return { + maxBlockGas: new Gas(cappedDAGas, cappedL2Gas), + maxBlobFields: cappedBlobFields, + }; + } + protected async makeBlockBuilderDeps(globalVariables: GlobalVariables, fork: MerkleTreeWriteOperations) { const txPublicSetupAllowList = [ ...(await getDefaultAllowedSetupFunctions()), diff --git a/yarn-project/validator-client/src/validator.ha.integration.test.ts b/yarn-project/validator-client/src/validator.ha.integration.test.ts index 5370ba592af5..cba52926ec05 100644 --- a/yarn-project/validator-client/src/validator.ha.integration.test.ts +++ b/yarn-project/validator-client/src/validator.ha.integration.test.ts @@ -90,6 +90,7 @@ describe('ValidatorClient HA Integration', () => { slotDuration: 24, l1ChainId: 1, rollupVersion: 1, + rollupManaLimit: 200_000_000, }); worldState = mock(); epochCache = mock(); diff --git a/yarn-project/validator-client/src/validator.integration.test.ts b/yarn-project/validator-client/src/validator.integration.test.ts index 811a84927be0..967f3da9e042 100644 --- a/yarn-project/validator-client/src/validator.integration.test.ts +++ b/yarn-project/validator-client/src/validator.integration.test.ts @@ -23,7 +23,7 @@ import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { CommitteeAttestation, L2Block } from '@aztec/stdlib/block'; import { L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import { type L1RollupConstants, getTimestampForSlot } from '@aztec/stdlib/epoch-helpers'; -import { GasFees } from '@aztec/stdlib/gas'; +import { Gas, GasFees } from '@aztec/stdlib/gas'; import { tryStop } from '@aztec/stdlib/interfaces/server'; import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging'; import { type BlockProposal, CheckpointProposal } from '@aztec/stdlib/p2p'; @@ -127,6 +127,7 @@ describe('ValidatorClient Integration', () => { slotDuration: l1Constants.slotDuration, l1ChainId: chainId.toNumber(), 
rollupVersion: version.toNumber(), + rollupManaLimit: 200_000_000, txPublicSetupAllowListExtend: [], }, synchronizer, @@ -242,6 +243,8 @@ describe('ValidatorClient Integration', () => { vkTreeRoot: getVKTreeRoot(), protocolContractsHash, anchorBlockHeader: anchorBlockHeader ?? genesisBlockHeader, + gasLimits: new Gas(100_000, 1_000_000), + gasUsed: new Gas(10_000, 100_000), maxFeesPerGas: new GasFees(1e12, 1e12), feePayer, }); @@ -564,6 +567,35 @@ describe('ValidatorClient Integration', () => { expect(isValid).toBe(false); }); + it('rejects block that would exceed checkpoint mana limit', async () => { + const { blocks } = await buildCheckpoint( + CheckpointNumber(1), + slotNumber, + emptyL1ToL2Messages, + emptyPreviousCheckpointOutHashes, + BlockNumber(1), + 3, + () => buildTxs(2), + ); + + // Measure total mana used by the first two blocks + const manaFirstTwo = + blocks[0].block.header.totalManaUsed.toNumber() + blocks[1].block.header.totalManaUsed.toNumber(); + + // Set rollupManaLimit to only cover the first two blocks' actual mana. + // Block 3 re-execution will have 0 remaining mana, so the actual gas check + // in the public processor will reject all txs, producing a tx count mismatch. + attestor.checkpointsBuilder.updateConfig({ rollupManaLimit: manaFirstTwo }); + + // Blocks 1 and 2 should validate successfully + await attestorValidateBlocks(blocks.slice(0, 2)); + + // Block 3 should fail: remaining checkpoint mana is 0, so the processor + // stops after the first tx's actual gas exceeds the limit. 
+ const isValid = await attestor.validator.validateBlockProposal(blocks[2].proposal, mockPeerId); + expect(isValid).toBe(false); + }); + it('refuses block proposal with mismatching l1 to l2 messages', async () => { const l1ToL2Messages = makeInboxMessages(4, { messagesPerCheckpoint: 4 }); await proposer.archiver.dataStore.addL1ToL2Messages(l1ToL2Messages); diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index 7d9c4b975288..1477a94b01b2 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -25,7 +25,6 @@ import { OffenseType, WANT_TO_SLASH_EVENT } from '@aztec/slasher'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { BlockData, L2Block, L2BlockSink, L2BlockSource } from '@aztec/stdlib/block'; import type { getEpochAtSlot } from '@aztec/stdlib/epoch-helpers'; -import { Gas } from '@aztec/stdlib/gas'; import type { SlasherConfig, WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; import { type L1ToL2MessageSource, computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging'; import type { BlockProposal } from '@aztec/stdlib/p2p'; @@ -110,6 +109,7 @@ describe('ValidatorClient', () => { slotDuration: 24, l1ChainId: 1, rollupVersion: 1, + rollupManaLimit: 200_000_000, }); worldState = mock(); epochCache = mock(); @@ -366,9 +366,7 @@ describe('ValidatorClient', () => { publicProcessorDuration: 0, numTxs: proposal.txHashes.length, failedTxs: [], - publicGas: Gas.empty(), usedTxs: [], - usedTxBlobFields: 0, block: { header: clonedBlockHeader, body: { txEffects: times(proposal.txHashes.length, () => TxEffect.empty()) }, diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 0fb99d4c6e41..2fad5db0bca7 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -24,6 +24,7 @@ 
import { AuthRequest, AuthResponse, BlockProposalValidator, ReqRespSubProtocol } import { OffenseType, WANT_TO_SLASH_EVENT, type Watcher, type WatcherEmitter } from '@aztec/slasher'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { CommitteeAttestationsAndSigners, L2Block, L2BlockSink, L2BlockSource } from '@aztec/stdlib/block'; +import { validateCheckpoint } from '@aztec/stdlib/checkpoint'; import { getEpochAtSlot, getTimestampForSlot } from '@aztec/stdlib/epoch-helpers'; import type { CreateCheckpointProposalLastBlockData, @@ -766,6 +767,18 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) return { isValid: false, reason: 'out_hash_mismatch' }; } + // Final round of validations on the checkpoint, just in case. + try { + validateCheckpoint(computedCheckpoint, { + rollupManaLimit: this.checkpointsBuilder.getConfig().rollupManaLimit, + maxDABlockGas: undefined, + maxL2BlockGas: undefined, + }); + } catch (err) { + this.log.warn(`Checkpoint validation failed: ${err}`, proposalInfo); + return { isValid: false, reason: 'checkpoint_validation_failed' }; + } + this.log.verbose(`Checkpoint proposal validation successful for slot ${slot}`, proposalInfo); return { isValid: true }; } finally { diff --git a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts index 38a4e9ce81e5..ad85df243053 100644 --- a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts +++ b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.ts @@ -1,4 +1,4 @@ -import { GENESIS_BLOCK_HEADER_HASH, INITIAL_L2_BLOCK_NUM, INITIAL_L2_CHECKPOINT_NUM } from '@aztec/constants'; +import { GENESIS_BLOCK_HEADER_HASH, INITIAL_CHECKPOINT_NUMBER, INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; import { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; import type { Fr } from 
'@aztec/foundation/curves/bn254'; import { type Logger, createLogger } from '@aztec/foundation/log'; @@ -263,15 +263,15 @@ export class ServerWorldStateSynchronizer proposed: latestBlockId, checkpointed: { block: { number: INITIAL_L2_BLOCK_NUM, hash: GENESIS_BLOCK_HEADER_HASH.toString() }, - checkpoint: { number: INITIAL_L2_CHECKPOINT_NUM, hash: genesisCheckpointHeaderHash }, + checkpoint: { number: INITIAL_CHECKPOINT_NUMBER, hash: genesisCheckpointHeaderHash }, }, finalized: { block: { number: status.finalizedBlockNumber, hash: finalizedBlockHash ?? '' }, - checkpoint: { number: INITIAL_L2_CHECKPOINT_NUM, hash: genesisCheckpointHeaderHash }, + checkpoint: { number: INITIAL_CHECKPOINT_NUMBER, hash: genesisCheckpointHeaderHash }, }, proven: { block: { number: provenBlockNumber, hash: provenBlockHash ?? '' }, - checkpoint: { number: INITIAL_L2_CHECKPOINT_NUM, hash: genesisCheckpointHeaderHash }, + checkpoint: { number: INITIAL_CHECKPOINT_NUMBER, hash: genesisCheckpointHeaderHash }, }, }; } From d471d2404744c478820c9b10bb16a8dfb07499de Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Tue, 3 Mar 2026 11:03:19 -0300 Subject: [PATCH 10/37] chore(e2e): fix e2e bot L1 tx nonce reuse (#21052) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes the following error caused by reusing the same L1 private key across multiple bots, without waiting for the txs from the previous bots to be done. 
``` 13:06:39 ● e2e_bot › bridge resume › does not reuse prior bridge claims if recipient address changes 13:06:39 13:06:39 expect(received).rejects.toThrow(expected) 13:06:39 13:06:39 Expected substring: "test error" 13:06:39 Received message: "Transaction creation failed.· 13:06:39 URL: http://127.0.0.1:8545 13:06:39 Request body: {\"method\":\"eth_sendRawTransaction\",\"params\":[\"0x02f8b1827a6935843b9aca00843c028b4a82b54194a513e6e4b8f2a923d98304ec87f64353c4d5c85380b844095ea7b3000000000000000000000000846005fdb8e3f125749df47d36b2c826029e536400000000000000000000000000000000000000000000003635c9adc5dea00000c080a019eb199d74619635cc3f88492e1818ae26ece48c5dc102091b03be9a4cbc2df7a0047b03d47e4f034a239c639c6610d472e36a35b01c070f6b9072c1b67df6b9c4\"]}·· 13:06:39 Request Arguments: 13:06:39 from: 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266 13:06:39 to: 0xa513e6e4b8f2a923d98304ec87f64353c4d5c853 13:06:39 data: 0x095ea7b3000000000000000000000000846005fdb8e3f125749df47d36b2c826029e536400000000000000000000000000000000000000000000003635c9adc5dea00000·· 13:06:39 Contract Call: 13:06:39 address: 0xa513e6e4b8f2a923d98304ec87f64353c4d5c853 13:06:39 function: approve(address spender, uint256 value) 13:06:39 args: (0x846005fdb8e3f125749df47d36b2c826029e5364, 1000000000000000000000) 13:06:39 sender: 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266· 13:06:39 Docs: https://viem.sh/docs/contract/writeContract 13:06:39 Details: replacement transaction underpriced 13:06:39 Version: viem@2.38.2" 13:06:39 13:06:39 87 | this.logger.info(`Approving ${amount} tokens for ${stringifyEthAddress(address, addressName)}`); 13:06:39 88 | await this.extendedClient.waitForTransactionReceipt({ 13:06:39 > 89 | hash: await this.contract.write.approve([ 13:06:39 | ^ 13:06:39 90 | address, 13:06:39 91 | amount 13:06:39 92 | ]) 13:06:39 13:06:39 at getContractError (../node_modules/viem/utils/errors/getContractError.ts:78:10) 13:06:39 at writeContract.internal 
(../node_modules/viem/actions/wallet/writeContract.ts:242:13) 13:06:39 at L1TokenManager.approve (../aztec.js/dest/ethereum/portal_manager.js:89:19) 13:06:39 at L1FeeJuicePortalManager.bridgeTokensPublic (../aztec.js/dest/ethereum/portal_manager.js:129:9) 13:06:39 at BotFactory.bridgeL1FeeJuice (../bot/dest/factory.js:450:23) 13:06:39 at BotFactory.getOrCreateBridgeClaim (../bot/dest/factory.js:432:23) 13:06:39 at BotFactory.setupAccountWithPrivateKey (../bot/dest/factory.js:170:27) 13:06:39 at BotFactory.setupAccount (../bot/dest/factory.js:149:20) 13:06:39 at BotFactory.setup (../bot/dest/factory.js:49:39) 13:06:39 at Bot.create (../bot/dest/bot.js:14:61) 13:06:39 at Object. (src/e2e_bot.test.ts:189:9) ``` The fix is to use a different L1 private key per test. --- yarn-project/end-to-end/src/e2e_bot.test.ts | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_bot.test.ts b/yarn-project/end-to-end/src/e2e_bot.test.ts index 78a1f6affff9..9fb5d09ff8f7 100644 --- a/yarn-project/end-to-end/src/e2e_bot.test.ts +++ b/yarn-project/end-to-end/src/e2e_bot.test.ts @@ -52,6 +52,9 @@ describe('e2e_bot', () => { afterAll(() => teardown()); + let privateKeyIndex = 10; + const getPrivateKey = () => new SecretValue(bufferToHex(getPrivateKeyFromIndex(privateKeyIndex++)!)); + describe('transaction-bot', () => { let bot: Bot; beforeAll(async () => { @@ -131,8 +134,7 @@ describe('e2e_bot', () => { l1RpcUrls, feePaymentMethod: 'fee_juice', - // TODO: this should be taken from the `setup` call above - l1Mnemonic: new SecretValue('test test test test test test test test test test test junk'), + l1PrivateKey: getPrivateKey(), flushSetupTransactions: true, // Increase fee headroom to handle fee volatility from rapid block building in tests. // Fees can escalate >10x due to blocks built by earlier tests and bridge operations. 
@@ -172,8 +174,7 @@ describe('e2e_bot', () => { l1RpcUrls, feePaymentMethod: 'fee_juice', - // TODO: this should be taken from the `setup` call above - l1Mnemonic: new SecretValue('test test test test test test test test test test test junk'), + l1PrivateKey: getPrivateKey(), flushSetupTransactions: true, // Increase fee headroom to handle fee volatility from rapid block building in tests. // This test is especially susceptible because changing salt triggers a new bridge claim, @@ -238,7 +239,7 @@ describe('e2e_bot', () => { followChain: 'PROPOSED', botMode: 'transfer', senderPrivateKey: new SecretValue(Fr.random()), - l1PrivateKey: new SecretValue(bufferToHex(getPrivateKeyFromIndex(8)!)), + l1PrivateKey: getPrivateKey(), l1RpcUrls, flushSetupTransactions: true, }; @@ -261,7 +262,7 @@ describe('e2e_bot', () => { followChain: 'PROPOSED', botMode: 'crosschain', l1RpcUrls, - l1PrivateKey: new SecretValue(bufferToHex(getPrivateKeyFromIndex(9)!)), + l1PrivateKey: getPrivateKey(), flushSetupTransactions: true, l1ToL2SeedCount: 2, }; From 7bd1eaa2f43ddad5b40af4f00661dc58f97da075 Mon Sep 17 00:00:00 2001 From: PhilWindle <60546371+PhilWindle@users.noreply.github.com> Date: Tue, 3 Mar 2026 16:53:19 +0000 Subject: [PATCH 11/37] feat: Update L1 to L2 message APIs (#20913) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR fixes issue A-548 by replacing the block-based L1-to-L2 message readiness API with a checkpoint-based one. The core insight: messages are grouped by checkpoint number, not block number, so readiness should be checked against the checkpoint the node has synced to. Key changes: New API getCheckpointNumber() added to AztecNode, L2BlockSource, and archiver interfaces — returns the latest synced checkpoint number. Renamed getL1ToL2MessageBlock → getL1ToL2MessageCheckpoint — now returns CheckpointNumber instead of BlockNumber, eliminating the deprecated BlockNumber.fromCheckpointNumber cast. 
Simplified isL1ToL2MessageReady — removed the forPublicConsumption flag entirely. This now checks whether we have reached the first block in the message's inclusion checkpoint. Updated all callers — bot factory, cross-chain bot, e2e tests, spartan setup, and epoch tests. Removed all A-548 workaround comments. E2E test improvements — new helpers advanceCheckpoint and waitForBlockToCheckpoint for checkpoint-aware test flow. The drift test now mines checkpoints instead of blocks. --- .../archiver/src/store/message_store.ts | 2 +- .../archiver/src/test/mock_l2_block_source.ts | 6 + .../aztec-node/src/aztec-node/server.ts | 10 +- .../aztec.js/src/utils/cross_chain.ts | 27 ++-- yarn-project/bot/src/cross_chain_bot.ts | 9 +- yarn-project/bot/src/factory.ts | 7 - .../src/bench/node_rpc_perf.test.ts | 14 +- .../l1_to_l2.test.ts | 126 ++++++++++++------ .../e2e_epochs/epochs_mbps.parallel.test.ts | 2 +- .../epochs_proof_public_cross_chain.test.ts | 1 - .../src/spartan/setup_test_wallets.ts | 2 +- .../stdlib/src/block/l2_block_source.ts | 6 + .../stdlib/src/interfaces/archiver.test.ts | 8 ++ .../stdlib/src/interfaces/archiver.ts | 1 + .../stdlib/src/interfaces/aztec-node.test.ts | 16 ++- .../stdlib/src/interfaces/aztec-node.ts | 18 ++- 16 files changed, 162 insertions(+), 93 deletions(-) diff --git a/yarn-project/archiver/src/store/message_store.ts b/yarn-project/archiver/src/store/message_store.ts index 4c07ba9f9c86..0408f7f0c3c2 100644 --- a/yarn-project/archiver/src/store/message_store.ts +++ b/yarn-project/archiver/src/store/message_store.ts @@ -137,7 +137,7 @@ export class MessageStore { ); } - // Check the first message in a block has the correct index. + // Check the first message in a checkpoint has the correct index. 
if ( (!lastMessage || message.checkpointNumber > lastMessage.checkpointNumber) && message.index !== expectedStart diff --git a/yarn-project/archiver/src/test/mock_l2_block_source.ts b/yarn-project/archiver/src/test/mock_l2_block_source.ts index ff4a0fe4af52..da295c09cb96 100644 --- a/yarn-project/archiver/src/test/mock_l2_block_source.ts +++ b/yarn-project/archiver/src/test/mock_l2_block_source.ts @@ -42,6 +42,12 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { await this.createCheckpoints(numBlocks, 1); } + public getCheckpointNumber(): Promise { + return Promise.resolve( + this.checkpointList.length === 0 ? CheckpointNumber.ZERO : CheckpointNumber(this.checkpointList.length), + ); + } + /** Creates checkpoints, each containing `blocksPerCheckpoint` blocks. */ public async createCheckpoints(numCheckpoints: number, blocksPerCheckpoint: number = 1) { for (let c = 0; c < numCheckpoints; c++) { diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 0fe264bf11e6..6d1a4ce06bee 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -743,6 +743,10 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { return await this.blockSource.getCheckpointedL2BlockNumber(); } + public getCheckpointNumber(): Promise { + return this.blockSource.getCheckpointNumber(); + } + /** * Method to fetch the version of the package. * @returns The node package version @@ -1050,11 +1054,9 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { return [witness.index, witness.path]; } - public async getL1ToL2MessageBlock(l1ToL2Message: Fr): Promise { + public async getL1ToL2MessageCheckpoint(l1ToL2Message: Fr): Promise { const messageIndex = await this.l1ToL2MessageSource.getL1ToL2MessageIndex(l1ToL2Message); - return messageIndex - ? 
BlockNumber.fromCheckpointNumber(InboxLeaf.checkpointNumberFromIndex(messageIndex)) - : undefined; + return messageIndex ? InboxLeaf.checkpointNumberFromIndex(messageIndex) : undefined; } /** diff --git a/yarn-project/aztec.js/src/utils/cross_chain.ts b/yarn-project/aztec.js/src/utils/cross_chain.ts index 38c278fe9597..5ba97b49c758 100644 --- a/yarn-project/aztec.js/src/utils/cross_chain.ts +++ b/yarn-project/aztec.js/src/utils/cross_chain.ts @@ -8,17 +8,15 @@ import type { AztecNode } from '@aztec/stdlib/interfaces/client'; * @param l1ToL2MessageHash - Hash of the L1 to L2 message * @param opts - Options */ -export async function waitForL1ToL2MessageReady( - node: Pick, +export function waitForL1ToL2MessageReady( + node: Pick, l1ToL2MessageHash: Fr, opts: { /** Timeout for the operation in seconds */ timeoutSeconds: number; - /** True if the message is meant to be consumed from a public function */ forPublicConsumption: boolean; }, ) { - const messageBlockNumber = await node.getL1ToL2MessageBlock(l1ToL2MessageHash); return retryUntil( - () => isL1ToL2MessageReady(node, l1ToL2MessageHash, { ...opts, messageBlockNumber }), + () => isL1ToL2MessageReady(node, l1ToL2MessageHash), `L1 to L2 message ${l1ToL2MessageHash.toString()} ready`, opts.timeoutSeconds, 1, @@ -29,25 +27,18 @@ export async function waitForL1ToL2MessageReady( * Returns whether the L1 to L2 message is ready to be consumed. 
* @param node - Aztec node instance used to obtain the information about the message * @param l1ToL2MessageHash - Hash of the L1 to L2 message - * @param opts - Options * @returns True if the message is ready to be consumed, false otherwise */ export async function isL1ToL2MessageReady( - node: Pick, + node: Pick, l1ToL2MessageHash: Fr, - opts: { - /** True if the message is meant to be consumed from a public function */ forPublicConsumption: boolean; - /** Cached synced block number for the message (will be fetched from PXE otherwise) */ messageBlockNumber?: number; - }, ): Promise { - const blockNumber = await node.getBlockNumber(); - const messageBlockNumber = opts.messageBlockNumber ?? (await node.getL1ToL2MessageBlock(l1ToL2MessageHash)); - if (messageBlockNumber === undefined) { + const messageCheckpointNumber = await node.getL1ToL2MessageCheckpoint(l1ToL2MessageHash); + if (messageCheckpointNumber === undefined) { return false; } - // Note that public messages can be consumed 1 block earlier, since the sequencer will include the messages - // in the L1 to L2 message tree before executing the txs for the block. In private, however, we need to wait - // until the message is included so we can make use of the membership witness. - return opts.forPublicConsumption ? 
blockNumber + 1 >= messageBlockNumber : blockNumber >= messageBlockNumber; + // L1 to L2 messages are included in the first block of a checkpoint + const latestBlock = await node.getBlock('latest'); + return latestBlock !== undefined && latestBlock.checkpointNumber >= messageCheckpointNumber; } diff --git a/yarn-project/bot/src/cross_chain_bot.ts b/yarn-project/bot/src/cross_chain_bot.ts index 0165b5a778a8..52c59a54f58d 100644 --- a/yarn-project/bot/src/cross_chain_bot.ts +++ b/yarn-project/bot/src/cross_chain_bot.ts @@ -174,14 +174,7 @@ export class CrossChainBot extends BaseBot { ): Promise { const now = Date.now(); for (const msg of pendingMessages) { - const ready = await isL1ToL2MessageReady(this.node, Fr.fromHexString(msg.msgHash), { - // Use forPublicConsumption: false so we wait until blockNumber >= messageBlockNumber. - // With forPublicConsumption: true, the check returns true one block early (the sequencer - // includes L1→L2 messages before executing the block's txs), but gas estimation simulates - // against the current world state which doesn't yet have the message. - // See https://linear.app/aztec-labs/issue/A-548 for details. - forPublicConsumption: false, - }); + const ready = await isL1ToL2MessageReady(this.node, Fr.fromHexString(msg.msgHash)); if (ready) { return msg; } diff --git a/yarn-project/bot/src/factory.ts b/yarn-project/bot/src/factory.ts index 970dd845ec69..9fc4a49960c0 100644 --- a/yarn-project/bot/src/factory.ts +++ b/yarn-project/bot/src/factory.ts @@ -162,11 +162,6 @@ export class BotFactory { const firstMsg = allMessages[0]; await waitForL1ToL2MessageReady(this.aztecNode, Fr.fromHexString(firstMsg.msgHash), { timeoutSeconds: this.config.l1ToL2MessageTimeoutSeconds, - // Use forPublicConsumption: false so we wait until the message is in the current world - // state. With true, it returns one block early which causes gas estimation simulation to - // fail since it runs against the current state. 
- // See https://linear.app/aztec-labs/issue/A-548 for details. - forPublicConsumption: false, }); this.log.info(`First L1→L2 message is ready`); } @@ -511,7 +506,6 @@ export class BotFactory { await this.withNoMinTxsPerBlock(() => waitForL1ToL2MessageReady(this.aztecNode, messageHash, { timeoutSeconds: this.config.l1ToL2MessageTimeoutSeconds, - forPublicConsumption: false, }), ); return existingClaim.claim; @@ -550,7 +544,6 @@ export class BotFactory { await this.withNoMinTxsPerBlock(() => waitForL1ToL2MessageReady(this.aztecNode, Fr.fromHexString(claim.messageHash), { timeoutSeconds: this.config.l1ToL2MessageTimeoutSeconds, - forPublicConsumption: false, }), ); diff --git a/yarn-project/end-to-end/src/bench/node_rpc_perf.test.ts b/yarn-project/end-to-end/src/bench/node_rpc_perf.test.ts index a0362bf60e56..385076c83cdf 100644 --- a/yarn-project/end-to-end/src/bench/node_rpc_perf.test.ts +++ b/yarn-project/end-to-end/src/bench/node_rpc_perf.test.ts @@ -280,6 +280,12 @@ describe('e2e_node_rpc_perf', () => { expect(stats.avg).toBeLessThan(1000); }); + it('benchmarks getCheckpointNumber', async () => { + const { stats } = await benchmark('getCheckpointNumber', () => aztecNode.getCheckpointNumber()); + addResult('getCheckpointNumber', stats); + expect(stats.avg).toBeLessThan(1000); + }); + it('benchmarks getProvenBlockNumber', async () => { const { stats } = await benchmark('getProvenBlockNumber', () => aztecNode.getProvenBlockNumber()); addResult('getProvenBlockNumber', stats); @@ -414,10 +420,12 @@ describe('e2e_node_rpc_perf', () => { }); describe('message APIs', () => { - it('benchmarks getL1ToL2MessageBlock', async () => { + it('benchmarks getL1ToL2MessageCheckpoint', async () => { const l1ToL2Message = Fr.random(); - const { stats } = await benchmark('getL1ToL2MessageBlock', () => aztecNode.getL1ToL2MessageBlock(l1ToL2Message)); - addResult('getL1ToL2MessageBlock', stats); + const { stats } = await benchmark('getL1ToL2MessageCheckpoint', () => + 
aztecNode.getL1ToL2MessageCheckpoint(l1ToL2Message), + ); + addResult('getL1ToL2MessageCheckpoint', stats); expect(stats.avg).toBeLessThan(2000); }); diff --git a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l1_to_l2.test.ts b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l1_to_l2.test.ts index e83b195c5f84..ec5ecbe78b57 100644 --- a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l1_to_l2.test.ts +++ b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l1_to_l2.test.ts @@ -6,7 +6,7 @@ import { isL1ToL2MessageReady } from '@aztec/aztec.js/messaging'; import type { AztecNode } from '@aztec/aztec.js/node'; import { TxExecutionResult } from '@aztec/aztec.js/tx'; import type { Wallet } from '@aztec/aztec.js/wallet'; -import { BlockNumber } from '@aztec/foundation/branded-types'; +import { BlockNumber, IndexWithinCheckpoint } from '@aztec/foundation/branded-types'; import { timesAsync } from '@aztec/foundation/collection'; import { retryUntil } from '@aztec/foundation/retry'; import { TestContract } from '@aztec/noir-test-contracts.js/Test'; @@ -56,7 +56,34 @@ describe('e2e_cross_chain_messaging l1_to_l2', () => { if (newBlock === block) { throw new Error(`Failed to advance block ${block}`); } - return undefined; + return newBlock; + }; + + const waitForBlockToCheckpoint = async (blockNumber: BlockNumber) => { + return await retryUntil( + async () => { + const checkpointedBlockNumber = await aztecNode.getCheckpointedBlockNumber(); + const isCheckpointed = checkpointedBlockNumber >= blockNumber; + if (!isCheckpointed) { + return undefined; + } + const [checkpointedBlock] = await aztecNode.getCheckpointedBlocks(blockNumber, 1); + return checkpointedBlock.checkpointNumber; + }, + 'wait for block to checkpoint', + 60, + ); + }; + + const advanceCheckpoint = async () => { + let checkpoint = await aztecNode.getCheckpointNumber(); + const originalCheckpoint = checkpoint; + log.warn(`Original checkpoint ${originalCheckpoint}`); + do { + const newBlock 
= await advanceBlock(); + checkpoint = await waitForBlockToCheckpoint(newBlock); + } while (checkpoint <= originalCheckpoint); + log.warn(`At checkpoint ${checkpoint}`); }; // Same as above but ignores errors. Useful if we expect a prune. @@ -68,12 +95,19 @@ describe('e2e_cross_chain_messaging l1_to_l2', () => { } }; - // Waits until the message is fetched by the archiver of the node and returns the msg target block + // Waits until the message is fetched by the archiver of the node and returns the msg target checkpoint const waitForMessageFetched = async (msgHash: Fr) => { log.warn(`Waiting until the message is fetched by the node`); return await retryUntil( - async () => (await aztecNode.getL1ToL2MessageBlock(msgHash)) ?? (await advanceBlock()), - 'get msg block', + async () => { + const checkpoint = await aztecNode.getL1ToL2MessageCheckpoint(msgHash); + if (checkpoint !== undefined) { + return checkpoint; + } + await advanceBlock(); + return undefined; + }, + 'get msg checkpoint', 60, ); }; @@ -84,20 +118,27 @@ describe('e2e_cross_chain_messaging l1_to_l2', () => { scope: 'private' | 'public', onNotReady?: (blockNumber: BlockNumber) => Promise, ) => { - const msgBlock = await waitForMessageFetched(msgHash); - log.warn(`Waiting until L2 reaches msg block ${msgBlock} (current is ${await aztecNode.getBlockNumber()})`); + const msgCheckpoint = await waitForMessageFetched(msgHash); + log.warn( + `Waiting until L2 reaches the first block of msg checkpoint ${msgCheckpoint} (current is ${await aztecNode.getCheckpointNumber()})`, + ); await retryUntil( async () => { - const blockNumber = await aztecNode.getBlockNumber(); + const [blockNumber, checkpointNumber] = await Promise.all([ + aztecNode.getBlockNumber(), + aztecNode.getCheckpointNumber(), + ]); const witness = await aztecNode.getL1ToL2MessageMembershipWitness('latest', msgHash); - const isReady = await isL1ToL2MessageReady(aztecNode, msgHash, { forPublicConsumption: scope === 'public' }); - log.info(`Block is 
${blockNumber}. Message block is ${msgBlock}. Witness ${!!witness}. Ready ${isReady}.`); + const isReady = await isL1ToL2MessageReady(aztecNode, msgHash); + log.info( + `Block is ${blockNumber}, checkpoint is ${checkpointNumber}. Message checkpoint is ${msgCheckpoint}. Witness ${!!witness}. Ready ${isReady}.`, + ); if (!isReady) { await (onNotReady ? onNotReady(blockNumber) : advanceBlock()); } return isReady; }, - `wait for rollup to reach msg block ${msgBlock}`, + `wait for rollup to reach msg checkpoint ${msgCheckpoint}`, 120, ); }; @@ -118,12 +159,8 @@ describe('e2e_cross_chain_messaging l1_to_l2', () => { await waitForMessageReady(message1Hash, scope); - // The waitForMessageReady returns true earlier for public-land, so we can only check the membership - // witness for private-land here. - if (scope === 'private') { - const [message1Index] = (await aztecNode.getL1ToL2MessageMembershipWitness('latest', message1Hash))!; - expect(actualMessage1Index.toBigInt()).toBe(message1Index); - } + const [message1Index] = (await aztecNode.getL1ToL2MessageMembershipWitness('latest', message1Hash))!; + expect(actualMessage1Index.toBigInt()).toBe(message1Index); // We consume the L1 to L2 message using the test contract either from private or public await getConsumeMethod(scope)( @@ -143,12 +180,10 @@ describe('e2e_cross_chain_messaging l1_to_l2', () => { // We check that the duplicate message was correctly inserted by checking that its message index is defined await waitForMessageReady(message2Hash, scope); - if (scope === 'private') { - const [message2Index] = (await aztecNode.getL1ToL2MessageMembershipWitness('latest', message2Hash))!; - expect(message2Index).toBeDefined(); - expect(message2Index).toBeGreaterThan(actualMessage1Index.toBigInt()); - expect(actualMessage2Index.toBigInt()).toBe(message2Index); - } + const [message2Index] = (await aztecNode.getL1ToL2MessageMembershipWitness('latest', message2Hash))!; + expect(message2Index).toBeDefined(); + 
expect(message2Index).toBeGreaterThan(actualMessage1Index.toBigInt()); + expect(actualMessage2Index.toBigInt()).toBe(message2Index); // Now we consume the message again. Everything should pass because oracle should return the duplicate message // which is not nullified @@ -162,21 +197,22 @@ describe('e2e_cross_chain_messaging l1_to_l2', () => { 120_000, ); - // Inbox block number can drift on two scenarios: if the rollup reorgs and rolls back its own - // block number, or if the inbox receives too many messages and they are inserted faster than - // they are consumed. In this test, we mine several blocks without marking them as proven until + // Inbox checkpoint number can drift on two scenarios: if the rollup reorgs and rolls back its own + // checkpoint number, or if the inbox receives too many messages and they are inserted faster than + // they are consumed. In this test, we mine several checkpoints without marking them as proven until // we can trigger a reorg, and then wait until the message can be processed to consume it. 
it.each(['private', 'public'] as const)( 'can consume L1 to L2 message in %s after inbox drifts away from the rollup', async (scope: 'private' | 'public') => { // Stop proving const lastProven = await aztecNode.getBlockNumber(); - log.warn(`Stopping proof submission at block ${lastProven} to allow drift`); + const [checkpointedProvenBlock] = await aztecNode.getCheckpointedBlocks(lastProven, 1); + log.warn(`Stopping proof submission at checkpoint ${checkpointedProvenBlock.checkpointNumber} to allow drift`); t.context.watcher.setIsMarkingAsProven(false); - // Mine several blocks to ensure drift + // Mine several checkpoints to ensure drift log.warn(`Mining blocks to allow drift`); - await timesAsync(4, advanceBlock); + await timesAsync(4, advanceCheckpoint); // Generate and send the message to the L1 contract log.warn(`Sending L1 to L2 message`); @@ -185,9 +221,9 @@ describe('e2e_cross_chain_messaging l1_to_l2', () => { const { msgHash, globalLeafIndex } = await sendL1ToL2Message(message, crossChainTestHarness); // Wait until the Aztec node has synced it - const msgBlockNumber = await waitForMessageFetched(msgHash); - log.warn(`Message synced for block ${msgBlockNumber}`); - expect(lastProven + 4).toBeLessThan(msgBlockNumber); + const msgCheckpointNumber = await waitForMessageFetched(msgHash); + log.warn(`Message synced for checkpoint ${msgCheckpointNumber}`); + expect(checkpointedProvenBlock.checkpointNumber + 4).toBeLessThan(msgCheckpointNumber); // And keep mining until we prune back to the original block number. 
Now the "waiting for two blocks" // strategy for the message to be ready to use shouldn't work, since the lastProven block is more than @@ -214,25 +250,33 @@ describe('e2e_cross_chain_messaging l1_to_l2', () => { // On private, we simulate the tx locally and check that we get a missing message error, then we advance to the next block await expect(() => consume().simulate({ from: user1Address })).rejects.toThrow(/No L1 to L2 message found/); await tryAdvanceBlock(); - await t.context.watcher.markAsProven(); } else { - // On public, we actually send the tx and check that it reverts due to the missing message. - // This advances the block too as a side-effect. Note that we do not rely on a simulation since the cross chain messages - // do not get added at the beginning of the block during node_simulatePublicCalls (maybe they should?). + // In public it is harder to determine when a message becomes consumable. + // We send a transaction, this advances the chain and the message MIGHT be consumed in the new block. + // If it does get consumed then we check that the block contains the message. 
+ // If it fails we check that the block doesn't contain the message const receipt = await consume().send({ from: user1Address, wait: { dontThrowOnRevert: true } }); - expect(receipt.executionResult).toEqual(TxExecutionResult.APP_LOGIC_REVERTED); - await t.context.watcher.markAsProven(); + if (receipt.executionResult === TxExecutionResult.SUCCESS) { + // The block the transaction included should be for the message checkpoint number + // and be the first block in the checkpoint + const block = await aztecNode.getBlock(receipt.blockNumber!); + expect(block).toBeDefined(); + expect(block!.checkpointNumber).toEqual(msgCheckpointNumber); + expect(block!.indexWithinCheckpoint).toEqual(IndexWithinCheckpoint.ZERO); + } else { + expect(receipt.executionResult).toEqual(TxExecutionResult.APP_LOGIC_REVERTED); + } } + await t.context.watcher.markAsProven(); }); // Verify the membership witness is available for creating the tx (private-land only) if (scope === 'private') { const [messageIndex] = (await aztecNode.getL1ToL2MessageMembershipWitness('latest', msgHash))!; expect(messageIndex).toEqual(globalLeafIndex.toBigInt()); + // And consume the message for private, public was already consumed. 
+ await consume().send({ from: user1Address }); } - - // And consume the message - await consume().send({ from: user1Address }); }, ); }); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_mbps.parallel.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_mbps.parallel.test.ts index 247a56d44ae9..161bd84bbf38 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_mbps.parallel.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_mbps.parallel.test.ts @@ -370,7 +370,7 @@ describe('e2e_epochs/epochs_mbps', () => { l1ToL2Messages.map(async ({ msgHash }, i) => { logger.warn(`Waiting for L1→L2 message ${i + 1} to be ready`); await retryUntil( - () => isL1ToL2MessageReady(context.aztecNode, msgHash, { forPublicConsumption: true }), + () => isL1ToL2MessageReady(context.aztecNode, msgHash), `L1→L2 message ${i + 1} ready`, test.L2_SLOT_DURATION_IN_S * 5, ); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_public_cross_chain.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_public_cross_chain.test.ts index 846cd5f82b96..699930ba4395 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_public_cross_chain.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_public_cross_chain.test.ts @@ -57,7 +57,6 @@ describe('e2e_epochs/epochs_proof_public_cross_chain', () => { logger.warn(`Waiting for message ${msgHash} with index ${globalLeafIndex} to be synced`); await waitForL1ToL2MessageReady(context.aztecNode, msgHash, { - forPublicConsumption: true, timeoutSeconds: test.L2_SLOT_DURATION_IN_S * 6, }); diff --git a/yarn-project/end-to-end/src/spartan/setup_test_wallets.ts b/yarn-project/end-to-end/src/spartan/setup_test_wallets.ts index 8009d474e601..790c19e277a4 100644 --- a/yarn-project/end-to-end/src/spartan/setup_test_wallets.ts +++ b/yarn-project/end-to-end/src/spartan/setup_test_wallets.ts @@ -278,7 +278,7 @@ async function bridgeL1FeeJuice( const claim = await portal.bridgeTokensPublic(recipient, amount, 
true /* mint */); const isSynced = async () => - (await aztecNode.getL1ToL2MessageBlock(Fr.fromHexString(claim.messageHash))) !== undefined; + (await aztecNode.getL1ToL2MessageCheckpoint(Fr.fromHexString(claim.messageHash))) !== undefined; await retryUntil(isSynced, `message ${claim.messageHash} sync`, 24, 0.5); log.info(`Created a claim for ${amount} L1 fee juice to ${recipient}.`, claim); diff --git a/yarn-project/stdlib/src/block/l2_block_source.ts b/yarn-project/stdlib/src/block/l2_block_source.ts index f1daf550d7eb..39368d09ad99 100644 --- a/yarn-project/stdlib/src/block/l2_block_source.ts +++ b/yarn-project/stdlib/src/block/l2_block_source.ts @@ -49,6 +49,12 @@ export interface L2BlockSource { */ getBlockNumber(): Promise; + /** + * Gets the number of the latest L2 checkpoint processed by the block source implementation. + * @returns The number of the latest L2 checkpoint processed by the block source implementation. + */ + getCheckpointNumber(): Promise; + /** * Gets the number of the latest L2 block proven seen by the block source implementation. * @returns The number of the latest L2 block proven seen by the block source implementation. 
diff --git a/yarn-project/stdlib/src/interfaces/archiver.test.ts b/yarn-project/stdlib/src/interfaces/archiver.test.ts index 2b5cb983325b..710118ceb5e9 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.test.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.test.ts @@ -81,6 +81,11 @@ describe('ArchiverApiSchema', () => { expect(result).toEqual(BlockNumber(1)); }); + it('getCheckpointNumber', async () => { + const result = await context.client.getCheckpointNumber(); + expect(result).toEqual(CheckpointNumber(1)); + }); + it('getProvenBlockNumber', async () => { const result = await context.client.getProvenBlockNumber(); expect(result).toEqual(BlockNumber(1)); @@ -405,6 +410,9 @@ class MockArchiver implements ArchiverApi { getCheckpointedL2BlockNumber(): Promise { return Promise.resolve(BlockNumber(1)); } + getCheckpointNumber(): Promise { + return Promise.resolve(CheckpointNumber(1)); + } getFinalizedL2BlockNumber(): Promise { return Promise.resolve(BlockNumber(0)); } diff --git a/yarn-project/stdlib/src/interfaces/archiver.ts b/yarn-project/stdlib/src/interfaces/archiver.ts index 9af2b49e6fbc..949c66575040 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.ts @@ -86,6 +86,7 @@ export const ArchiverApiSchema: ApiSchemaFor = { getBlockNumber: z.function().args().returns(BlockNumberSchema), getProvenBlockNumber: z.function().args().returns(BlockNumberSchema), getCheckpointedL2BlockNumber: z.function().args().returns(BlockNumberSchema), + getCheckpointNumber: z.function().args().returns(CheckpointNumberSchema), getFinalizedL2BlockNumber: z.function().args().returns(BlockNumberSchema), getBlock: z.function().args(BlockNumberSchema).returns(L2Block.schema.optional()), getBlockHeader: z diff --git a/yarn-project/stdlib/src/interfaces/aztec-node.test.ts b/yarn-project/stdlib/src/interfaces/aztec-node.test.ts index 436945dd773e..9afac73c16d5 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node.test.ts 
+++ b/yarn-project/stdlib/src/interfaces/aztec-node.test.ts @@ -115,8 +115,8 @@ describe('AztecNodeApiSchema', () => { expect(response).toEqual([1n, expect.any(SiblingPath)]); }); - it('getL1ToL2MessageBlock', async () => { - const response = await context.client.getL1ToL2MessageBlock(Fr.random()); + it('getL1ToL2MessageCheckpoint', async () => { + const response = await context.client.getL1ToL2MessageCheckpoint(Fr.random()); expect(response).toEqual(5); }); @@ -209,6 +209,11 @@ describe('AztecNodeApiSchema', () => { expect(response).toBe(BlockNumber(1)); }); + it('getCheckpointNumber', async () => { + const response = await context.client.getCheckpointNumber(); + expect(response).toBe(CheckpointNumber(1)); + }); + it('isReady', async () => { const response = await context.client.isReady(); expect(response).toBe(true); @@ -578,9 +583,9 @@ class MockAztecNode implements AztecNode { expect(noteHash).toBeInstanceOf(Fr); return Promise.resolve(MembershipWitness.random(NOTE_HASH_TREE_HEIGHT)); } - getL1ToL2MessageBlock(l1ToL2Message: Fr): Promise { + getL1ToL2MessageCheckpoint(l1ToL2Message: Fr): Promise { expect(l1ToL2Message).toBeInstanceOf(Fr); - return Promise.resolve(BlockNumber(5)); + return Promise.resolve(CheckpointNumber(5)); } isL1ToL2MessageSynced(l1ToL2Message: Fr): Promise { expect(l1ToL2Message).toBeInstanceOf(Fr); @@ -658,6 +663,9 @@ class MockAztecNode implements AztecNode { getCheckpointedBlockNumber(): Promise { return Promise.resolve(BlockNumber(1)); } + getCheckpointNumber(): Promise { + return Promise.resolve(CheckpointNumber(1)); + } isReady(): Promise { return Promise.resolve(true); } diff --git a/yarn-project/stdlib/src/interfaces/aztec-node.ts b/yarn-project/stdlib/src/interfaces/aztec-node.ts index 34b8639eee7f..7f613d9e6891 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node.ts @@ -4,7 +4,9 @@ import { BlockNumber, BlockNumberPositiveSchema, BlockNumberSchema, + CheckpointNumber, 
CheckpointNumberPositiveSchema, + CheckpointNumberSchema, EpochNumber, EpochNumberSchema, type SlotNumber, @@ -172,14 +174,14 @@ export interface AztecNode l1ToL2Message: Fr, ): Promise<[bigint, SiblingPath] | undefined>; - /** Returns the L2 block number in which this L1 to L2 message becomes available, or undefined if not found. */ - getL1ToL2MessageBlock(l1ToL2Message: Fr): Promise; + /** Returns the L2 checkpoint number in which this L1 to L2 message becomes available, or undefined if not found. */ + getL1ToL2MessageCheckpoint(l1ToL2Message: Fr): Promise; /** * Returns whether an L1 to L2 message is synced by archiver. * @param l1ToL2Message - The L1 to L2 message to check. * @returns Whether the message is synced. - * @deprecated Use `getL1ToL2MessageBlock` instead. This method may return true even if the message is not ready to use. + * @deprecated Use `getL1ToL2MessageCheckpoint` instead. This method may return true even if the message is not ready to use. */ isL1ToL2MessageSynced(l1ToL2Message: Fr): Promise; @@ -230,6 +232,12 @@ export interface AztecNode */ getCheckpointedBlockNumber(): Promise; + /** + * Method to fetch the latest checkpoint number synchronized by the node. + * @returns The checkpoint number. + */ + getCheckpointNumber(): Promise; + /** * Method to determine if the node is ready to accept transactions. * @returns - Flag indicating the readiness for tx submission. 
@@ -517,7 +525,7 @@ export const AztecNodeApiSchema: ApiSchemaFor = { .args(BlockParameterSchema, schemas.Fr) .returns(z.tuple([schemas.BigInt, SiblingPath.schemaFor(L1_TO_L2_MSG_TREE_HEIGHT)]).optional()), - getL1ToL2MessageBlock: z.function().args(schemas.Fr).returns(BlockNumberSchema.optional()), + getL1ToL2MessageCheckpoint: z.function().args(schemas.Fr).returns(CheckpointNumberSchema.optional()), isL1ToL2MessageSynced: z.function().args(schemas.Fr).returns(z.boolean()), @@ -534,6 +542,8 @@ export const AztecNodeApiSchema: ApiSchemaFor = { getBlockNumber: z.function().returns(BlockNumberSchema), + getCheckpointNumber: z.function().returns(CheckpointNumberSchema), + getProvenBlockNumber: z.function().returns(BlockNumberSchema), getCheckpointedBlockNumber: z.function().returns(BlockNumberSchema), From 13ae331caa84f93b5a98b1e7a24c49ca6f2242ab Mon Sep 17 00:00:00 2001 From: danielntmd <162406516+danielntmd@users.noreply.github.com> Date: Tue, 3 Mar 2026 12:52:19 -0500 Subject: [PATCH 12/37] fix: (A-589) epochs l1 reorgs test (#20999) Use same structure as the handles missed message inserted by an L1 reorg test to wait for checkpoint when sending L2 txs to help trigger mbps. 
Co-authored-by: danielntmd --- .../src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts index f2f78d843213..f327326dbeef 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.parallel.test.ts @@ -407,8 +407,9 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { ); it('updates L1 to L2 messages changed due to an L1 reorg', async () => { - // Send L2 txs to trigger multi-block checkpoints + // Send L2 txs to trigger multi-block checkpoints and wait for them to land in a checkpoint await sendTransactions(TX_COUNT, 100); + await test.waitUntilCheckpointNumber(CheckpointNumber(2), L2_SLOT_DURATION_IN_S * 4); // Send 3 messages and wait for archiver sync logger.warn(`Sending 3 cross chain messages`); From 503d0f91682e8f1567281d9904e083a45feb943b Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Tue, 3 Mar 2026 17:31:05 -0300 Subject: [PATCH 13/37] feat(sequencer): add SEQ_MAX_TX_PER_CHECKPOINT config (#21016) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes A-611 ## Summary - Adds `SEQ_MAX_TX_PER_CHECKPOINT` env var and `maxTxsPerCheckpoint` config to limit total transactions across all blocks in a checkpoint, mirroring the existing two-level pattern for gas limits - Proposers derive per-block TX limits from the checkpoint limit (using `ceil(checkpointLimit / maxBlocks * multiplier)`) when `SEQ_MAX_TX_PER_BLOCK` is not explicitly set - Validators enforce the checkpoint TX budget via `capLimitsByCheckpointBudgets` and `validateCheckpoint` - Makes `maxTxsPerBlock` mandatory (`number | undefined`) in proposal validator constructors to prevent missed wiring, and wires it through P2P, validator-client, and sequencer-client - 
Extracts `computeBlockLimits` as a free function with dedicated unit tests - Moves `maxTxsPerBlock` config mapping to `sharedSequencerConfigMappings` so both P2P and sequencer packages share it - Adds extra structural validations to the checkpoint validation, and wires them to the archiver data updater. ## Test plan - Unit tests for `computeBlockLimits` (L2 gas, DA gas, TX count derivation, multi-block mode) - Unit tests for `maxTxsPerBlock` validation in shared proposal validator test suite (block + checkpoint) - Unit tests for `maxTxsPerCheckpoint` capping in `checkpoint_builder.test.ts` - Unit tests for TX limit validation in `validate.ts` - Existing sequencer and checkpoint proposal job tests updated and passing 🤖 Generated with [Claude Code](https://claude.com/claude-code) --------- Co-authored-by: Claude Opus 4.6 --- yarn-project/archiver/src/archiver.ts | 10 +- yarn-project/archiver/src/factory.ts | 3 + .../src/modules/data_store_updater.test.ts | 25 +- .../src/modules/data_store_updater.ts | 7 +- .../archiver/src/modules/l1_synchronizer.ts | 10 +- .../archiver/src/test/mock_structs.ts | 26 +- yarn-project/foundation/src/config/env_var.ts | 1 + .../checkpoint_proposal_validator.test.ts | 66 +++++ .../proposal_validator_test_suite.ts | 12 +- .../p2p/src/services/libp2p/libp2p_service.ts | 10 +- .../src/client/sequencer-client.test.ts | 110 +++++++++ .../src/client/sequencer-client.ts | 143 ++++++----- yarn-project/sequencer-client/src/config.ts | 7 +- .../src/sequencer/checkpoint_proposal_job.ts | 4 +- .../src/sequencer/sequencer.test.ts | 2 +- .../src/test/mock_checkpoint_builder.ts | 89 +++---- yarn-project/stdlib/src/block/l2_block.ts | 2 +- .../stdlib/src/checkpoint/checkpoint.ts | 15 +- .../stdlib/src/checkpoint/validate.test.ts | 233 ++++++++++++++++++ .../stdlib/src/checkpoint/validate.ts | 154 ++++++++++-- .../stdlib/src/config/sequencer-config.ts | 4 +- .../stdlib/src/interfaces/block-builder.ts | 2 + yarn-project/stdlib/src/interfaces/configs.ts | 5 
+ yarn-project/stdlib/src/tests/mocks.ts | 21 +- yarn-project/stdlib/src/tx/block_header.ts | 6 + yarn-project/validator-client/README.md | 6 +- .../src/checkpoint_builder.test.ts | 70 ++++++ .../src/checkpoint_builder.ts | 14 +- .../src/validator.ha.integration.test.ts | 1 + 29 files changed, 879 insertions(+), 179 deletions(-) create mode 100644 yarn-project/sequencer-client/src/client/sequencer-client.test.ts create mode 100644 yarn-project/stdlib/src/checkpoint/validate.test.ts diff --git a/yarn-project/archiver/src/archiver.ts b/yarn-project/archiver/src/archiver.ts index de82a0482186..28c0cfa720ab 100644 --- a/yarn-project/archiver/src/archiver.ts +++ b/yarn-project/archiver/src/archiver.ts @@ -120,7 +120,11 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra }, private readonly blobClient: BlobClientInterface, instrumentation: ArchiverInstrumentation, - protected override readonly l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr }, + protected override readonly l1Constants: L1RollupConstants & { + l1StartBlockHash: Buffer32; + genesisArchiveRoot: Fr; + rollupManaLimit?: number; + }, synchronizer: ArchiverL1Synchronizer, events: ArchiverEmitter, l2TipsCache?: L2TipsCache, @@ -133,7 +137,9 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra this.synchronizer = synchronizer; this.events = events; this.l2TipsCache = l2TipsCache ?? new L2TipsCache(this.dataStore.blockStore); - this.updater = new ArchiverDataStoreUpdater(this.dataStore, this.l2TipsCache); + this.updater = new ArchiverDataStoreUpdater(this.dataStore, this.l2TipsCache, { + rollupManaLimit: l1Constants.rollupManaLimit, + }); // Running promise starts with a small interval inbetween runs, so all iterations needed for the initial sync // are done as fast as possible. This then gets updated once the initial sync completes. 
diff --git a/yarn-project/archiver/src/factory.ts b/yarn-project/archiver/src/factory.ts index ca4d60f8a780..f7f2d46b44db 100644 --- a/yarn-project/archiver/src/factory.ts +++ b/yarn-project/archiver/src/factory.ts @@ -85,6 +85,7 @@ export async function createArchiver( genesisArchiveRoot, slashingProposerAddress, targetCommitteeSize, + rollupManaLimit, ] = await Promise.all([ rollup.getL1StartBlock(), rollup.getL1GenesisTime(), @@ -92,6 +93,7 @@ export async function createArchiver( rollup.getGenesisArchiveTreeRoot(), rollup.getSlashingProposerAddress(), rollup.getTargetCommitteeSize(), + rollup.getManaLimit(), ] as const); const l1StartBlockHash = await publicClient @@ -110,6 +112,7 @@ export async function createArchiver( proofSubmissionEpochs: Number(proofSubmissionEpochs), targetCommitteeSize, genesisArchiveRoot: Fr.fromString(genesisArchiveRoot.toString()), + rollupManaLimit: Number(rollupManaLimit), }; const archiverConfig = merge( diff --git a/yarn-project/archiver/src/modules/data_store_updater.test.ts b/yarn-project/archiver/src/modules/data_store_updater.test.ts index e261b76faab9..94721e4c22ea 100644 --- a/yarn-project/archiver/src/modules/data_store_updater.test.ts +++ b/yarn-project/archiver/src/modules/data_store_updater.test.ts @@ -5,9 +5,7 @@ import { ContractClassPublishedEvent } from '@aztec/protocol-contracts/class-reg import { ContractInstancePublishedEvent } from '@aztec/protocol-contracts/instance-registry'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { L2Block } from '@aztec/stdlib/block'; -import { Checkpoint } from '@aztec/stdlib/checkpoint'; import { ContractClassLog, PrivateLog } from '@aztec/stdlib/logs'; -import { CheckpointHeader } from '@aztec/stdlib/rollup'; import '@aztec/stdlib/testing/jest'; import { readFileSync } from 'fs'; @@ -15,7 +13,7 @@ import { dirname, resolve } from 'path'; import { fileURLToPath } from 'url'; import { KVArchiverDataStore } from '../store/kv_archiver_store.js'; -import { 
makePublishedCheckpoint } from '../test/mock_structs.js'; +import { makeCheckpoint, makePublishedCheckpoint } from '../test/mock_structs.js'; import { ArchiverDataStoreUpdater } from './data_store_updater.js'; /** Loads the sample ContractClassPublished event payload from protocol-contracts fixtures. */ @@ -110,12 +108,7 @@ describe('ArchiverDataStoreUpdater', () => { // Make sure it has a different archive root (which it will by default from random) expect(conflictingBlock.archive.root.equals(localBlock.archive.root)).toBe(false); - const checkpointWithConflict = new Checkpoint( - conflictingBlock.archive, - CheckpointHeader.random({ slotNumber: SlotNumber(100) }), - [conflictingBlock], - CheckpointNumber(1), - ); + const checkpointWithConflict = makeCheckpoint([conflictingBlock]); const publishedCheckpoint = makePublishedCheckpoint(checkpointWithConflict, 10); // This should detect the conflict and prune the local block @@ -135,8 +128,7 @@ describe('ArchiverDataStoreUpdater', () => { block.body.txEffects[0].contractClassLogs = [contractClassLog]; block.body.txEffects[0].privateLogs = [PrivateLog.fromBuffer(getSampleContractInstancePublishedEventPayload())]; - const checkpoint = new Checkpoint(block.archive, CheckpointHeader.random(), [block], CheckpointNumber(1)); - const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10); + const publishedCheckpoint = makePublishedCheckpoint(makeCheckpoint([block]), 10); await updater.addCheckpoints([publishedCheckpoint]); @@ -166,8 +158,7 @@ describe('ArchiverDataStoreUpdater', () => { await updater.addProposedBlocks([block]); // Create checkpoint with the SAME block (same archive root) - const checkpoint = new Checkpoint(block.archive, CheckpointHeader.random(), [block], CheckpointNumber(1)); - const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10); + const publishedCheckpoint = makePublishedCheckpoint(makeCheckpoint([block]), 10); await updater.addCheckpoints([publishedCheckpoint]); @@ -196,13 +187,7 
@@ describe('ArchiverDataStoreUpdater', () => { }); expect(checkpointBlock.archive.root.equals(localBlock.archive.root)).toBe(false); - const checkpoint = new Checkpoint( - checkpointBlock.archive, - CheckpointHeader.random({ slotNumber: SlotNumber(100) }), - [checkpointBlock], - CheckpointNumber(1), - ); - await updater.addCheckpoints([makePublishedCheckpoint(checkpoint, 10)]); + await updater.addCheckpoints([makePublishedCheckpoint(makeCheckpoint([checkpointBlock]), 10)]); // Verify checkpoint block is stored const storedBlock = await store.getBlock(BlockNumber(1)); diff --git a/yarn-project/archiver/src/modules/data_store_updater.ts b/yarn-project/archiver/src/modules/data_store_updater.ts index dd2e6becd57a..83864240f01d 100644 --- a/yarn-project/archiver/src/modules/data_store_updater.ts +++ b/yarn-project/archiver/src/modules/data_store_updater.ts @@ -11,7 +11,7 @@ import { ContractInstanceUpdatedEvent, } from '@aztec/protocol-contracts/instance-registry'; import type { L2Block, ValidateCheckpointResult } from '@aztec/stdlib/block'; -import type { PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; +import { type PublishedCheckpoint, validateCheckpoint } from '@aztec/stdlib/checkpoint'; import { type ExecutablePrivateFunctionWithMembershipProof, type UtilityFunctionWithMembershipProof, @@ -48,6 +48,7 @@ export class ArchiverDataStoreUpdater { constructor( private store: KVArchiverDataStore, private l2TipsCache?: L2TipsCache, + private opts: { rollupManaLimit?: number } = {}, ) {} /** @@ -97,6 +98,10 @@ export class ArchiverDataStoreUpdater { checkpoints: PublishedCheckpoint[], pendingChainValidationStatus?: ValidateCheckpointResult, ): Promise { + for (const checkpoint of checkpoints) { + validateCheckpoint(checkpoint.checkpoint, { rollupManaLimit: this.opts?.rollupManaLimit }); + } + const result = await this.store.transactionAsync(async () => { // Before adding checkpoints, check for conflicts with local blocks if any const { prunedBlocks, 
lastAlreadyInsertedBlockNumber } = await this.pruneMismatchingLocalBlocks(checkpoints); diff --git a/yarn-project/archiver/src/modules/l1_synchronizer.ts b/yarn-project/archiver/src/modules/l1_synchronizer.ts index 640a10234127..221d50336fb7 100644 --- a/yarn-project/archiver/src/modules/l1_synchronizer.ts +++ b/yarn-project/archiver/src/modules/l1_synchronizer.ts @@ -69,13 +69,19 @@ export class ArchiverL1Synchronizer implements Traceable { private readonly epochCache: EpochCache, private readonly dateProvider: DateProvider, private readonly instrumentation: ArchiverInstrumentation, - private readonly l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr }, + private readonly l1Constants: L1RollupConstants & { + l1StartBlockHash: Buffer32; + genesisArchiveRoot: Fr; + rollupManaLimit?: number; + }, private readonly events: ArchiverEmitter, tracer: Tracer, l2TipsCache?: L2TipsCache, private readonly log: Logger = createLogger('archiver:l1-sync'), ) { - this.updater = new ArchiverDataStoreUpdater(this.store, l2TipsCache); + this.updater = new ArchiverDataStoreUpdater(this.store, l2TipsCache, { + rollupManaLimit: l1Constants.rollupManaLimit, + }); this.tracer = tracer; } diff --git a/yarn-project/archiver/src/test/mock_structs.ts b/yarn-project/archiver/src/test/mock_structs.ts index 974141601f5b..0888d717218c 100644 --- a/yarn-project/archiver/src/test/mock_structs.ts +++ b/yarn-project/archiver/src/test/mock_structs.ts @@ -127,6 +127,25 @@ export function makeL1PublishedData(l1BlockNumber: number): L1PublishedData { return new L1PublishedData(BigInt(l1BlockNumber), BigInt(l1BlockNumber * 1000), makeBlockHash(l1BlockNumber)); } +/** Creates a Checkpoint from a list of blocks with a header that matches the blocks' structure. 
*/ +export function makeCheckpoint(blocks: L2Block[], checkpointNumber = CheckpointNumber(1)): Checkpoint { + const firstBlock = blocks[0]; + const { slotNumber, timestamp, coinbase, feeRecipient, gasFees } = firstBlock.header.globalVariables; + return new Checkpoint( + blocks.at(-1)!.archive, + CheckpointHeader.random({ + lastArchiveRoot: firstBlock.header.lastArchive.root, + slotNumber, + timestamp, + coinbase, + feeRecipient, + gasFees, + }), + blocks, + checkpointNumber, + ); +} + /** Wraps a Checkpoint with L1 published data and random attestations. */ export function makePublishedCheckpoint( checkpoint: Checkpoint, @@ -301,11 +320,6 @@ export async function makeCheckpointWithLogs( return txEffect; }); - const checkpoint = new Checkpoint( - AppendOnlyTreeSnapshot.random(), - CheckpointHeader.random(), - [block], - CheckpointNumber.fromBlockNumber(BlockNumber(blockNumber)), - ); + const checkpoint = makeCheckpoint([block], CheckpointNumber.fromBlockNumber(BlockNumber(blockNumber))); return makePublishedCheckpoint(checkpoint, blockNumber); } diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index b1a3c3734b57..45fe01dcdc11 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -202,6 +202,7 @@ export type EnvVar = | 'SENTINEL_HISTORY_LENGTH_IN_EPOCHS' | 'SENTINEL_HISTORIC_PROVEN_PERFORMANCE_LENGTH_IN_EPOCHS' | 'SEQ_MAX_TX_PER_BLOCK' + | 'SEQ_MAX_TX_PER_CHECKPOINT' | 'SEQ_MIN_TX_PER_BLOCK' | 'SEQ_PUBLISH_TXS_WITH_PROPOSALS' | 'SEQ_MAX_DA_BLOCK_GAS' diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts index 9bc2e2888864..ea093cd2ab2e 100644 --- a/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts +++ 
b/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts @@ -61,4 +61,70 @@ describe('CheckpointProposalValidator', () => { getTxs: () => [], epochCacheMock: () => mock(), }); + + describe('maxTxsPerBlock validation', () => { + const currentSlot = SlotNumber(100); + const nextSlot = SlotNumber(101); + let epochCache: ReturnType>; + + function setupEpochCache(proposerAddress: EthAddress) { + epochCache = mock(); + epochCache.getCurrentAndNextSlot.mockReturnValue({ currentSlot, nextSlot }); + epochCache.getProposerAttesterAddressInSlot.mockResolvedValue(proposerAddress); + } + + it('rejects checkpoint proposal when last block txHashes exceed maxTxsPerBlock', async () => { + const signer = Secp256k1Signer.random(); + setupEpochCache(signer.address); + const validator = new CheckpointProposalValidator(epochCache, { txsPermitted: true, maxTxsPerBlock: 2 }); + + const header = makeCheckpointHeader(0, { slotNumber: currentSlot }); + const proposal = await makeCheckpointProposalAdapter({ + blockHeader: header, + lastBlockHeader: header, + signer, + txHashes: Array.from({ length: 3 }, () => TxHash.random()), + }); + + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'reject', severity: expect.anything() }); + }); + + it('accepts checkpoint proposal when last block txHashes are within maxTxsPerBlock', async () => { + const signer = Secp256k1Signer.random(); + setupEpochCache(signer.address); + const validator = new CheckpointProposalValidator(epochCache, { txsPermitted: true, maxTxsPerBlock: 5 }); + + const header = makeCheckpointHeader(0, { slotNumber: currentSlot }); + const proposal = await makeCheckpointProposalAdapter({ + blockHeader: header, + lastBlockHeader: header, + signer, + txHashes: Array.from({ length: 3 }, () => TxHash.random()), + }); + + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'accept' }); + }); + + it('skips maxTxsPerBlock check when not 
configured', async () => { + const signer = Secp256k1Signer.random(); + setupEpochCache(signer.address); + const validator = new CheckpointProposalValidator(epochCache, { + txsPermitted: true, + maxTxsPerBlock: undefined, + }); + + const header = makeCheckpointHeader(0, { slotNumber: currentSlot }); + const proposal = await makeCheckpointProposalAdapter({ + blockHeader: header, + lastBlockHeader: header, + signer, + txHashes: Array.from({ length: 100 }, () => TxHash.random()), + }); + + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'accept' }); + }); + }); }); diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator_test_suite.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator_test_suite.ts index e58a007a3de7..ec12ec3442f6 100644 --- a/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator_test_suite.ts +++ b/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator_test_suite.ts @@ -58,7 +58,7 @@ export function sharedProposalValidatorTests { epochCache = epochCacheMock(); - validator = validatorFactory(epochCache, { txsPermitted: true }); + validator = validatorFactory(epochCache, { txsPermitted: true, maxTxsPerBlock: undefined }); epochCache.getCurrentAndNextSlot.mockReturnValue({ currentSlot: currentSlot, nextSlot: nextSlot, @@ -231,7 +231,10 @@ export function sharedProposalValidatorTests { it('returns mid tolerance error if txs not permitted and proposal contains txHashes', async () => { const currentProposer = getSigner(); - const validatorWithTxsDisabled = validatorFactory(epochCache, { txsPermitted: false }); + const validatorWithTxsDisabled = validatorFactory(epochCache, { + txsPermitted: false, + maxTxsPerBlock: undefined, + }); const header = makeHeader(1, 100, 100); const mockProposal = await makeProposal({ blockHeader: header, @@ -247,7 +250,10 @@ export function sharedProposalValidatorTests { const currentProposer = 
getSigner(); - const validatorWithTxsDisabled = validatorFactory(epochCache, { txsPermitted: false }); + const validatorWithTxsDisabled = validatorFactory(epochCache, { + txsPermitted: false, + maxTxsPerBlock: undefined, + }); const header = makeHeader(1, 100, 100); const mockProposal = await makeProposal({ blockHeader: header, diff --git a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts index 6066e7d2b6b3..de3c5c9f8fc8 100644 --- a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts +++ b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts @@ -222,14 +222,12 @@ export class LibP2PService extends WithTracer implements P2PService { this.protocolVersion, ); - this.blockProposalValidator = new BlockProposalValidator(epochCache, { + const proposalValidatorOpts = { txsPermitted: !config.disableTransactions, maxTxsPerBlock: config.maxTxsPerBlock, - }); - this.checkpointProposalValidator = new CheckpointProposalValidator(epochCache, { - txsPermitted: !config.disableTransactions, - maxTxsPerBlock: config.maxTxsPerBlock, - }); + }; + this.blockProposalValidator = new BlockProposalValidator(epochCache, proposalValidatorOpts); + this.checkpointProposalValidator = new CheckpointProposalValidator(epochCache, proposalValidatorOpts); this.checkpointAttestationValidator = config.fishermanMode ? 
new FishermanAttestationValidator(epochCache, mempools.attestationPool, telemetry) : new CheckpointAttestationValidator(epochCache); diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.test.ts b/yarn-project/sequencer-client/src/client/sequencer-client.test.ts new file mode 100644 index 000000000000..e325cefca47d --- /dev/null +++ b/yarn-project/sequencer-client/src/client/sequencer-client.test.ts @@ -0,0 +1,110 @@ +import { MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT } from '@aztec/constants'; +import { createLogger } from '@aztec/foundation/log'; + +import type { SequencerClientConfig } from '../config.js'; +import { computeBlockLimits } from './sequencer-client.js'; + +describe('computeBlockLimits', () => { + const log = createLogger('test'); + + /** Builds a minimal config with only the fields needed by computeBlockLimits. */ + function makeConfig(overrides: Partial = {}): SequencerClientConfig { + return { + ethereumSlotDuration: 12, + aztecSlotDuration: 72, + attestationPropagationTime: 3, + enforceTimeTable: true, + // No blockDurationMs -> single block mode -> maxNumberOfBlocks = 1 + ...overrides, + } as SequencerClientConfig; + } + + describe('L2 gas', () => { + it('derives maxL2BlockGas from rollupManaLimit when not explicitly set', () => { + const rollupManaLimit = 1_000_000; + // Single block mode (maxNumberOfBlocks=1), default multiplier=2: + // min(1_000_000, ceil(1_000_000 / 1 * 2)) = min(1_000_000, 2_000_000) = 1_000_000 + const result = computeBlockLimits(makeConfig(), rollupManaLimit, 12, log); + expect(result.maxL2BlockGas).toBe(rollupManaLimit); + }); + + it('uses explicit maxL2BlockGas when within rollupManaLimit', () => { + const result = computeBlockLimits(makeConfig({ maxL2BlockGas: 500_000 }), 1_000_000, 12, log); + expect(result.maxL2BlockGas).toBe(500_000); + }); + + it('caps explicit maxL2BlockGas at rollupManaLimit', () => { + const result = computeBlockLimits(makeConfig({ maxL2BlockGas: 2_000_000 }), 1_000_000, 12, log); + 
expect(result.maxL2BlockGas).toBe(1_000_000); + }); + }); + + describe('DA gas', () => { + const daLimit = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; + + it('derives maxDABlockGas from DA checkpoint limit when not explicitly set', () => { + // Single block mode (maxNumberOfBlocks=1), default multiplier=2: + // min(daLimit, ceil(daLimit / 1 * 2)) = min(daLimit, daLimit * 2) = daLimit + const result = computeBlockLimits(makeConfig(), 1_000_000, 12, log); + expect(result.maxDABlockGas).toBe(daLimit); + }); + + it('uses explicit maxDABlockGas when within DA checkpoint limit', () => { + const explicit = Math.floor(daLimit / 2); + const result = computeBlockLimits(makeConfig({ maxDABlockGas: explicit }), 1_000_000, 12, log); + expect(result.maxDABlockGas).toBe(explicit); + }); + + it('caps explicit maxDABlockGas at DA checkpoint limit', () => { + const result = computeBlockLimits(makeConfig({ maxDABlockGas: daLimit + 100_000 }), 1_000_000, 12, log); + expect(result.maxDABlockGas).toBe(daLimit); + }); + }); + + describe('TX count', () => { + it('uses explicit maxTxsPerBlock when set', () => { + const result = computeBlockLimits(makeConfig({ maxTxsPerBlock: 10 }), 1_000_000, 12, log); + expect(result.maxTxsPerBlock).toBe(10); + }); + + it('caps maxTxsPerBlock at maxTxsPerCheckpoint', () => { + const result = computeBlockLimits( + makeConfig({ maxTxsPerBlock: 50, maxTxsPerCheckpoint: 30 }), + 1_000_000, + 12, + log, + ); + expect(result.maxTxsPerBlock).toBe(30); + }); + + it('derives maxTxsPerBlock from maxTxsPerCheckpoint when per-block not set', () => { + // Multi-block mode with maxNumberOfBlocks=5, multiplier=2: + // min(100, ceil(100 / 5 * 2)) = min(100, 40) = 40 + const config = makeConfig({ + maxTxsPerCheckpoint: 100, + blockDurationMs: 8000, + }); + const result = computeBlockLimits(config, 1_000_000, 12, log); + expect(result.maxTxsPerBlock).toBe(40); + }); + }); + + describe('multi-block mode', () => { + it('distributes budget across blocks in multi-block mode', () => 
{ + // With blockDurationMs=8000, aztecSlotDuration=72, ethereumSlotDuration=12, + // attestationPropagationTime=3, l1PublishingTime=12: + // checkpointFinalizationTime = 1 + 3*2 + 12 = 19 + // timeReservedAtEnd = 8 + 19 = 27 + // timeAvailableForBlocks = 72 - 1 - 27 = 44 + // maxNumberOfBlocks = floor(44 / 8) = 5 + // With multiplier=2 and rollupManaLimit=1_000_000: + // maxL2BlockGas = min(1_000_000, ceil(1_000_000 / 5 * 2)) = min(1_000_000, 400_000) = 400_000 + const config = makeConfig({ blockDurationMs: 8000 }); + const result = computeBlockLimits(config, 1_000_000, 12, log); + expect(result.maxL2BlockGas).toBe(400_000); + + const daLimit = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; + expect(result.maxDABlockGas).toBe(Math.min(daLimit, Math.ceil((daLimit / 5) * 2))); + }); + }); +}); diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index 015401c0377c..613c5d172219 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -160,9 +160,12 @@ export class SequencerClient { const l1PublishingTimeBasedOnChain = isAnvilTestChain(config.l1ChainId) ? 1 : ethereumSlotDuration; const l1PublishingTime = config.l1PublishingTime ?? l1PublishingTimeBasedOnChain; - // Combine user-defined block-level limits with checkpoint-level limits (from L1/constants/config) - // to derive the final per-block gas budgets fed into the sequencer. 
- const { maxL2BlockGas, maxDABlockGas } = this.computeBlockGasLimits(config, rollupManaLimit, l1PublishingTime, log); + const { maxL2BlockGas, maxDABlockGas, maxTxsPerBlock } = computeBlockLimits( + config, + rollupManaLimit, + l1PublishingTime, + log, + ); const l1Constants = { l1GenesisTime, slotDuration: Number(slotDuration), ethereumSlotDuration, rollupManaLimit }; @@ -180,7 +183,7 @@ export class SequencerClient { deps.dateProvider, epochCache, rollupContract, - { ...config, l1PublishingTime, maxL2BlockGas, maxDABlockGas }, + { ...config, l1PublishingTime, maxL2BlockGas, maxDABlockGas, maxTxsPerBlock }, telemetryClient, log, ); @@ -242,69 +245,91 @@ export class SequencerClient { get maxL2BlockGas(): number | undefined { return this.sequencer.maxL2BlockGas; } +} - /** - * Computes per-block L2 and DA gas budgets based on the L1 rollup limits and the timetable. - * If the user explicitly set a limit, it is capped at the corresponding checkpoint limit. - * Otherwise, derives it as (checkpointLimit / maxBlocks) * multiplier, capped at the checkpoint limit. - */ - private static computeBlockGasLimits( - config: SequencerClientConfig, - rollupManaLimit: number, - l1PublishingTime: number, - log: ReturnType, - ): { maxL2BlockGas: number; maxDABlockGas: number } { - const maxNumberOfBlocks = new SequencerTimetable({ - ethereumSlotDuration: config.ethereumSlotDuration, - aztecSlotDuration: config.aztecSlotDuration, - l1PublishingTime, - p2pPropagationTime: config.attestationPropagationTime, - blockDurationMs: config.blockDurationMs, - enforce: config.enforceTimeTable ?? DefaultSequencerConfig.enforceTimeTable, - }).maxNumberOfBlocks; +/** + * Computes per-block L2 gas, DA gas, and TX count budgets based on the L1 rollup limits and the timetable. + * If the user explicitly set a limit, it is capped at the corresponding checkpoint limit. + * Otherwise, derives it as (checkpointLimit / maxBlocks) * multiplier, capped at the checkpoint limit. 
+ */ +export function computeBlockLimits( + config: SequencerClientConfig, + rollupManaLimit: number, + l1PublishingTime: number, + log: ReturnType, +): { maxL2BlockGas: number; maxDABlockGas: number; maxTxsPerBlock: number } { + const maxNumberOfBlocks = new SequencerTimetable({ + ethereumSlotDuration: config.ethereumSlotDuration, + aztecSlotDuration: config.aztecSlotDuration, + l1PublishingTime, + p2pPropagationTime: config.attestationPropagationTime, + blockDurationMs: config.blockDurationMs, + enforce: config.enforceTimeTable ?? DefaultSequencerConfig.enforceTimeTable, + }).maxNumberOfBlocks; - const multiplier = config.gasPerBlockAllocationMultiplier ?? DefaultSequencerConfig.gasPerBlockAllocationMultiplier; + const multiplier = config.gasPerBlockAllocationMultiplier ?? DefaultSequencerConfig.gasPerBlockAllocationMultiplier; - // Compute maxL2BlockGas - let maxL2BlockGas: number; - if (config.maxL2BlockGas !== undefined) { - if (config.maxL2BlockGas > rollupManaLimit) { - log.warn( - `Provided MAX_L2_BLOCK_GAS ${config.maxL2BlockGas} exceeds L1 rollup mana limit ${rollupManaLimit} (capping)`, - ); - maxL2BlockGas = rollupManaLimit; - } else { - maxL2BlockGas = config.maxL2BlockGas; - } + // Compute maxL2BlockGas + let maxL2BlockGas: number; + if (config.maxL2BlockGas !== undefined) { + if (config.maxL2BlockGas > rollupManaLimit) { + log.warn( + `Provided MAX_L2_BLOCK_GAS ${config.maxL2BlockGas} exceeds L1 rollup mana limit ${rollupManaLimit} (capping)`, + ); + maxL2BlockGas = rollupManaLimit; } else { - maxL2BlockGas = Math.min(rollupManaLimit, Math.ceil((rollupManaLimit / maxNumberOfBlocks) * multiplier)); + maxL2BlockGas = config.maxL2BlockGas; } + } else { + maxL2BlockGas = Math.min(rollupManaLimit, Math.ceil((rollupManaLimit / maxNumberOfBlocks) * multiplier)); + } - // Compute maxDABlockGas - const daCheckpointLimit = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; - let maxDABlockGas: number; - if (config.maxDABlockGas !== undefined) { - if (config.maxDABlockGas 
> daCheckpointLimit) { - log.warn( - `Provided MAX_DA_BLOCK_GAS ${config.maxDABlockGas} exceeds DA checkpoint limit ${daCheckpointLimit} (capping)`, - ); - maxDABlockGas = daCheckpointLimit; - } else { - maxDABlockGas = config.maxDABlockGas; - } + // Compute maxDABlockGas + const daCheckpointLimit = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; + let maxDABlockGas: number; + if (config.maxDABlockGas !== undefined) { + if (config.maxDABlockGas > daCheckpointLimit) { + log.warn( + `Provided MAX_DA_BLOCK_GAS ${config.maxDABlockGas} exceeds DA checkpoint limit ${daCheckpointLimit} (capping)`, + ); + maxDABlockGas = daCheckpointLimit; } else { - maxDABlockGas = Math.min(daCheckpointLimit, Math.ceil((daCheckpointLimit / maxNumberOfBlocks) * multiplier)); + maxDABlockGas = config.maxDABlockGas; } + } else { + maxDABlockGas = Math.min(daCheckpointLimit, Math.ceil((daCheckpointLimit / maxNumberOfBlocks) * multiplier)); + } - log.info(`Computed block gas limits L2=${maxL2BlockGas} DA=${maxDABlockGas}`, { - maxL2BlockGas, - maxDABlockGas, - rollupManaLimit, - daCheckpointLimit, - maxNumberOfBlocks, - multiplier, - }); - - return { maxL2BlockGas, maxDABlockGas }; + // Compute maxTxsPerBlock + const defaultMaxTxsPerBlock = 32; + let maxTxsPerBlock: number; + if (config.maxTxsPerBlock !== undefined) { + if (config.maxTxsPerCheckpoint !== undefined && config.maxTxsPerBlock > config.maxTxsPerCheckpoint) { + log.warn( + `Provided MAX_TX_PER_BLOCK ${config.maxTxsPerBlock} exceeds MAX_TX_PER_CHECKPOINT ${config.maxTxsPerCheckpoint} (capping)`, + ); + maxTxsPerBlock = config.maxTxsPerCheckpoint; + } else { + maxTxsPerBlock = config.maxTxsPerBlock; + } + } else if (config.maxTxsPerCheckpoint !== undefined) { + maxTxsPerBlock = Math.min( + config.maxTxsPerCheckpoint, + Math.ceil((config.maxTxsPerCheckpoint / maxNumberOfBlocks) * multiplier), + ); + } else { + maxTxsPerBlock = defaultMaxTxsPerBlock; } + + log.info(`Computed block limits L2=${maxL2BlockGas} DA=${maxDABlockGas} 
maxTxs=${maxTxsPerBlock}`, { + maxL2BlockGas, + maxDABlockGas, + maxTxsPerBlock, + rollupManaLimit, + daCheckpointLimit, + maxNumberOfBlocks, + multiplier, + }); + + return { maxL2BlockGas, maxDABlockGas, maxTxsPerBlock }; } diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index 8268d92245a3..f020431f90a3 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -13,7 +13,6 @@ import { type P2PConfig, p2pConfigMappings } from '@aztec/p2p/config'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { type ChainConfig, - DEFAULT_MAX_TXS_PER_BLOCK, type SequencerConfig, chainConfigMappings, sharedSequencerConfigMappings, @@ -38,7 +37,6 @@ export type { SequencerConfig }; */ export const DefaultSequencerConfig = { sequencerPollingIntervalMS: 500, - maxTxsPerBlock: DEFAULT_MAX_TXS_PER_BLOCK, minTxsPerBlock: 1, buildCheckpointIfEmpty: false, publishTxsWithProposals: false, @@ -78,6 +76,11 @@ export const sequencerConfigMappings: ConfigMappingsType = { description: 'The number of ms to wait between polling for checking to build on the next slot.', ...numberConfigHelper(DefaultSequencerConfig.sequencerPollingIntervalMS), }, + maxTxsPerCheckpoint: { + env: 'SEQ_MAX_TX_PER_CHECKPOINT', + description: 'The maximum number of txs across all blocks in a checkpoint.', + parseEnv: (val: string) => (val ? 
parseInt(val, 10) : undefined), + }, minTxsPerBlock: { env: 'SEQ_MIN_TX_PER_BLOCK', description: 'The minimum number of txs to include in a block.', diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts index d461b68c30b4..4ad3f9df3d18 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts @@ -271,6 +271,8 @@ export class CheckpointProposalJob implements Traceable { rollupManaLimit: this.l1Constants.rollupManaLimit, maxL2BlockGas: this.config.maxL2BlockGas, maxDABlockGas: this.config.maxDABlockGas, + maxTxsPerBlock: this.config.maxTxsPerBlock, + maxTxsPerCheckpoint: this.config.maxTxsPerCheckpoint, }); } catch (err) { this.log.error(`Built an invalid checkpoint at slot ${this.slot} (skipping proposal)`, err, { @@ -565,7 +567,7 @@ export class CheckpointProposalJob implements Traceable { ); this.setStateFn(SequencerState.CREATING_BLOCK, this.slot); - // Per-block limits derived at startup by SequencerClient.computeBlockGasLimits(), further capped + // Per-block limits derived at startup by computeBlockLimits(), further capped // by remaining checkpoint-level budgets inside CheckpointBuilder before each block is built. 
const blockBuilderOptions: PublicProcessorLimits = { maxTransactions: this.config.maxTxsPerBlock, diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 4f8e011c1c82..464340b385ff 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -873,7 +873,7 @@ describe('sequencer', () => { sequencer.updateConfig({ enforceTimeTable: true, maxTxsPerBlock: 4, blockDurationMs: 500 }); const txs = await timesParallel(8, i => makeTx(i * 0x10000)); - block = await makeBlock(txs); + block = await makeBlock(txs.slice(0, 4)); TestUtils.mockPendingTxs(p2p, txs); await sequencer.work(); diff --git a/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts b/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts index 9baf133dc1fc..3063bfd0f90c 100644 --- a/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts +++ b/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts @@ -1,5 +1,6 @@ -import { type BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; +import { type BlockNumber, CheckpointNumber, IndexWithinCheckpoint } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; +import { unfreeze } from '@aztec/foundation/types'; import { L2Block } from '@aztec/stdlib/block'; import { Checkpoint } from '@aztec/stdlib/checkpoint'; import type { @@ -85,8 +86,10 @@ export class MockCheckpointBuilder implements ICheckpointBlockBuilder { let usedTxs: Tx[]; if (this.blockProvider) { - // Dynamic mode: get block from provider - block = this.blockProvider(); + // Dynamic mode: get block from provider, cloning to avoid shared references across multiple buildBlock calls + block = L2Block.fromBuffer(this.blockProvider().toBuffer()); + block.header.globalVariables.blockNumber = blockNumber; + await 
block.header.recomputeHash(); usedTxs = []; this.builtBlocks.push(block); } else { @@ -122,69 +125,69 @@ export class MockCheckpointBuilder implements ICheckpointBlockBuilder { completeCheckpoint(): Promise { this.completeCheckpointCalled = true; const allBlocks = this.blockProvider ? this.builtBlocks : this.blocks; - const lastBlock = allBlocks[allBlocks.length - 1]; - // Create a CheckpointHeader from the last block's header for testing - const checkpointHeader = this.createCheckpointHeader(lastBlock); - return Promise.resolve( - new Checkpoint( - makeAppendOnlyTreeSnapshot(lastBlock.header.globalVariables.blockNumber + 1), - checkpointHeader, - allBlocks, - this.checkpointNumber, - ), - ); + return this.buildCheckpoint(allBlocks); } getCheckpoint(): Promise { this.getCheckpointCalled = true; const builtBlocks = this.blockProvider ? this.builtBlocks : this.blocks.slice(0, this.blockIndex); - const lastBlock = builtBlocks[builtBlocks.length - 1]; - if (!lastBlock) { + if (builtBlocks.length === 0) { throw new Error('No blocks built yet'); } - // Create a CheckpointHeader from the last block's header for testing - const checkpointHeader = this.createCheckpointHeader(lastBlock); - return Promise.resolve( - new Checkpoint( - makeAppendOnlyTreeSnapshot(lastBlock.header.globalVariables.blockNumber + 1), - checkpointHeader, - builtBlocks, - this.checkpointNumber, - ), - ); + return this.buildCheckpoint(builtBlocks); } - /** - * Creates a CheckpointHeader from a block's header for testing. - * This is a simplified version that creates a minimal CheckpointHeader. - */ - private createCheckpointHeader(block: L2Block): CheckpointHeader { - const header = block.header; - const gv = header.globalVariables; - return CheckpointHeader.empty({ - lastArchiveRoot: header.lastArchive.root, - blockHeadersHash: Fr.random(), // Use random for testing + /** Builds a structurally valid Checkpoint from a list of blocks, fixing up indexes and archive chaining. 
*/ + private async buildCheckpoint(blocks: L2Block[]): Promise { + // Fix up indexWithinCheckpoint and archive chaining so the checkpoint passes structural validation. + for (let i = 0; i < blocks.length; i++) { + blocks[i].indexWithinCheckpoint = IndexWithinCheckpoint(i); + if (i > 0) { + unfreeze(blocks[i].header).lastArchive = blocks[i - 1].archive; + await blocks[i].header.recomputeHash(); + } + } + + const firstBlock = blocks[0]; + const lastBlock = blocks[blocks.length - 1]; + const gv = firstBlock.header.globalVariables; + + const checkpointHeader = CheckpointHeader.empty({ + lastArchiveRoot: firstBlock.header.lastArchive.root, + blockHeadersHash: Fr.random(), slotNumber: gv.slotNumber, timestamp: gv.timestamp, coinbase: gv.coinbase, feeRecipient: gv.feeRecipient, gasFees: gv.gasFees, - totalManaUsed: header.totalManaUsed, + totalManaUsed: lastBlock.header.totalManaUsed, }); + + return new Checkpoint( + makeAppendOnlyTreeSnapshot(lastBlock.header.globalVariables.blockNumber + 1), + checkpointHeader, + blocks, + this.checkpointNumber, + ); } - /** Reset for reuse in another test */ - reset(): void { - this.blocks = []; + /** Resets per-checkpoint state (built blocks, consumed txs) while preserving config (blockProvider, seeded blocks). 
*/ + resetCheckpointState(): void { this.builtBlocks = []; - this.usedTxsPerBlock = []; this.blockIndex = 0; - this.buildBlockCalls = []; this.consumedTxHashes.clear(); this.completeCheckpointCalled = false; this.getCheckpointCalled = false; + } + + /** Reset for reuse in another test */ + reset(): void { + this.blocks = []; + this.usedTxsPerBlock = []; + this.buildBlockCalls = []; this.errorOnBuild = undefined; this.blockProvider = undefined; + this.resetCheckpointState(); } } @@ -273,6 +276,8 @@ export class MockCheckpointsBuilder implements ICheckpointsBuilder { if (!this.checkpointBuilder) { // Auto-create a builder if none was set this.checkpointBuilder = new MockCheckpointBuilder(constants, checkpointNumber); + } else { + this.checkpointBuilder.resetCheckpointState(); } return Promise.resolve(this.checkpointBuilder); diff --git a/yarn-project/stdlib/src/block/l2_block.ts b/yarn-project/stdlib/src/block/l2_block.ts index 362a36f996a5..e7c78f332a1d 100644 --- a/yarn-project/stdlib/src/block/l2_block.ts +++ b/yarn-project/stdlib/src/block/l2_block.ts @@ -176,7 +176,7 @@ export class L2Block { } & Partial[0]> = {}, ): Promise { const archive = new AppendOnlyTreeSnapshot(Fr.random(), blockNumber + 1); - const header = BlockHeader.random({ blockNumber, ...blockHeaderOverrides }); + const header = BlockHeader.random({ ...blockHeaderOverrides, blockNumber }); const body = await Body.random({ txsPerBlock, makeTxOptions, ...txOptions }); return new L2Block(archive, header, body, checkpointNumber, indexWithinCheckpoint); } diff --git a/yarn-project/stdlib/src/checkpoint/checkpoint.ts b/yarn-project/stdlib/src/checkpoint/checkpoint.ts index 2c95d3c0be4a..6f1159533cd1 100644 --- a/yarn-project/stdlib/src/checkpoint/checkpoint.ts +++ b/yarn-project/stdlib/src/checkpoint/checkpoint.ts @@ -6,7 +6,7 @@ import { IndexWithinCheckpoint, SlotNumber, } from '@aztec/foundation/branded-types'; -import { sum } from '@aztec/foundation/collection'; +import { pick, sum } from 
'@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { BufferReader, serializeSignedBigInt, serializeToBuffer } from '@aztec/foundation/serialize'; import type { FieldsOf } from '@aztec/foundation/types'; @@ -152,10 +152,12 @@ export class Checkpoint { startBlockNumber?: number; previousArchive?: AppendOnlyTreeSnapshot; feeAssetPriceModifier?: bigint; + archive?: AppendOnlyTreeSnapshot; } & Partial[0]> & Partial[1]> = {}, ) { - const header = CheckpointHeader.random(options); + const headerOptions = previousArchive ? { lastArchiveRoot: previousArchive.root, ...options } : options; + const header = CheckpointHeader.random(headerOptions); // Create blocks sequentially to chain archive roots properly. // Each block's header.lastArchive must equal the previous block's archive. @@ -166,11 +168,18 @@ export class Checkpoint { indexWithinCheckpoint: IndexWithinCheckpoint(i), ...options, ...(lastArchive ? { lastArchive } : {}), + ...pick(header, 'slotNumber', 'timestamp', 'coinbase', 'feeRecipient', 'gasFees'), }); lastArchive = block.archive; blocks.push(block); } - return new Checkpoint(AppendOnlyTreeSnapshot.random(), header, blocks, checkpointNumber, feeAssetPriceModifier); + return new Checkpoint( + options.archive ?? 
AppendOnlyTreeSnapshot.random(), + header, + blocks, + checkpointNumber, + feeAssetPriceModifier, + ); } } diff --git a/yarn-project/stdlib/src/checkpoint/validate.test.ts b/yarn-project/stdlib/src/checkpoint/validate.test.ts new file mode 100644 index 000000000000..6dfa314dd0c3 --- /dev/null +++ b/yarn-project/stdlib/src/checkpoint/validate.test.ts @@ -0,0 +1,233 @@ +import { BLOBS_PER_CHECKPOINT, FIELDS_PER_BLOB, MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT } from '@aztec/constants'; +import { BlockNumber, CheckpointNumber, IndexWithinCheckpoint, SlotNumber } from '@aztec/foundation/branded-types'; +import { Fr } from '@aztec/foundation/curves/bn254'; +import { EthAddress } from '@aztec/foundation/eth-address'; + +import { jest } from '@jest/globals'; + +import { AztecAddress } from '../aztec-address/index.js'; +import { GasFees } from '../gas/index.js'; +import { AppendOnlyTreeSnapshot } from '../trees/append_only_tree_snapshot.js'; +import { BlockHeader } from '../tx/block_header.js'; +import { Checkpoint } from './checkpoint.js'; +import { CheckpointValidationError, validateCheckpoint, validateCheckpointStructure } from './validate.js'; + +describe('validateCheckpointStructure', () => { + const checkpointNumber = CheckpointNumber(1); + + const fixedSlot = SlotNumber(42); + const fixedCoinbase = EthAddress.random(); + const fixedFeeRecipient = AztecAddress.fromField(Fr.random()); + const fixedGasFees = GasFees.random(); + const fixedTimestamp = BigInt(Math.floor(Date.now() / 1000)); + + /** Builds a valid random checkpoint with the given number of blocks. All blocks share the same slot, + * coinbase, feeRecipient, gasFees, and timestamp, and the checkpoint header's lastArchiveRoot is + * aligned with the first block. 
*/ + async function makeValidCheckpoint(numBlocks = 2): Promise { + const checkpoint = await Checkpoint.random(checkpointNumber, { + numBlocks, + startBlockNumber: 1, + slotNumber: fixedSlot, + coinbase: fixedCoinbase, + feeRecipient: fixedFeeRecipient, + gasFees: fixedGasFees, + timestamp: fixedTimestamp, + }); + // Align checkpoint header's lastArchiveRoot with the first block. + checkpoint.header.lastArchiveRoot = checkpoint.blocks[0].header.lastArchive.root; + return checkpoint; + } + + it('passes on a valid single-block checkpoint', async () => { + const checkpoint = await makeValidCheckpoint(1); + expect(() => validateCheckpointStructure(checkpoint)).not.toThrow(); + }); + + it('passes on a valid multi-block checkpoint', async () => { + const checkpoint = await makeValidCheckpoint(3); + expect(() => validateCheckpointStructure(checkpoint)).not.toThrow(); + }); + + it('throws when checkpoint slot does not match first block slot', async () => { + const checkpoint = await makeValidCheckpoint(1); + checkpoint.header.slotNumber = SlotNumber(checkpoint.blocks[0].slot + 1); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/all blocks must share the same slot/); + }); + + it('throws when checkpoint lastArchiveRoot does not match first block lastArchive root', async () => { + const checkpoint = await makeValidCheckpoint(1); + checkpoint.header.lastArchiveRoot = AppendOnlyTreeSnapshot.random().root; + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/lastArchiveRoot does not match first block/); + }); + + it('throws on empty block list', async () => { + const checkpoint = await makeValidCheckpoint(1); + checkpoint.blocks = []; + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => 
validateCheckpointStructure(checkpoint)).toThrow('Checkpoint has no blocks'); + }); + + it('throws when block count exceeds MAX_BLOCKS_PER_CHECKPOINT', async () => { + // Build 73 blocks (MAX_BLOCKS_PER_CHECKPOINT = 72) + const checkpoint = await makeValidCheckpoint(1); + // Reuse the single block to fill up 73 slots (structure checks happen before archive chaining in loop) + const block = checkpoint.blocks[0]; + checkpoint.blocks = Array.from({ length: 73 }, (_, i) => { + const cloned = Object.create(Object.getPrototypeOf(block), Object.getOwnPropertyDescriptors(block)); + cloned.indexWithinCheckpoint = IndexWithinCheckpoint(i); + return cloned; + }); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/exceeding limit of 72/); + }); + + it('throws when indexWithinCheckpoint is wrong', async () => { + const checkpoint = await makeValidCheckpoint(2); + // Swap the indices + const block0 = checkpoint.blocks[0]; + block0.indexWithinCheckpoint = IndexWithinCheckpoint(1); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/indexWithinCheckpoint/); + }); + + it('throws when block numbers are not sequential', async () => { + const checkpoint = await makeValidCheckpoint(2); + // Manually set block[1] to a non-sequential number (block[0].number + 2) + const block1 = checkpoint.blocks[1]; + // Override block number via header globalVariables + const gv = block1.header.globalVariables; + gv.blockNumber = BlockNumber(gv.blockNumber + 2); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/not sequential/); + }); + + it('throws when archive roots are not chained', async () => { + const checkpoint = await makeValidCheckpoint(2); + // Break chaining: replace block[1]'s 
header with a new one that has a random lastArchive + const block1 = checkpoint.blocks[1]; + block1.header = BlockHeader.from({ ...block1.header, lastArchive: AppendOnlyTreeSnapshot.random() }); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/lastArchive root does not match/); + }); + + it('throws when blocks have different slot numbers', async () => { + const checkpoint = await makeValidCheckpoint(2); + // Change block[1]'s slot to something different + const block1 = checkpoint.blocks[1]; + block1.header.globalVariables.slotNumber = SlotNumber(block1.slot + 1); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/all blocks must share the same slot/); + }); + + it('throws when a block global variables do not match checkpoint header', async () => { + const checkpoint = await makeValidCheckpoint(2); + // Mutate coinbase on block[1] to something different from the checkpoint header + checkpoint.blocks[1].header.globalVariables.coinbase = EthAddress.random(); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/global variables.*do not match checkpoint header/); + }); +}); + +describe('validateCheckpoint — limits', () => { + const checkpointNumber = CheckpointNumber(1); + const fixedSlot = SlotNumber(42); + const fixedCoinbase = EthAddress.random(); + const fixedFeeRecipient = AztecAddress.fromField(Fr.random()); + const fixedGasFees = GasFees.random(); + const fixedTimestamp = BigInt(Math.floor(Date.now() / 1000)); + + /** A known mana value injected into every block, making assertions deterministic. */ + const specificMana = 1_000_000; + + /** Opts that leave all limits wide open so structural validity is tested in isolation. 
*/ + const validOpts = { + rollupManaLimit: Number.MAX_SAFE_INTEGER, + maxL2BlockGas: undefined as number | undefined, + maxDABlockGas: undefined as number | undefined, + }; + + /** Builds a structurally valid single-block checkpoint with a known mana value. */ + async function makeCheckpoint(): Promise { + const checkpoint = await Checkpoint.random(checkpointNumber, { + numBlocks: 1, + startBlockNumber: 1, + slotNumber: fixedSlot, + coinbase: fixedCoinbase, + feeRecipient: fixedFeeRecipient, + gasFees: fixedGasFees, + timestamp: fixedTimestamp, + totalManaUsed: new Fr(specificMana), + }); + checkpoint.header.lastArchiveRoot = checkpoint.blocks[0].header.lastArchive.root; + return checkpoint; + } + + it('passes when all limits are within bounds', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, validOpts)).not.toThrow(); + }); + + it('throws when checkpoint mana exceeds rollupManaLimit', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, rollupManaLimit: specificMana - 1 })).toThrow( + CheckpointValidationError, + ); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, rollupManaLimit: specificMana - 1 })).toThrow( + /mana cost.*exceeds rollup limit/, + ); + }); + + it('passes when checkpoint mana equals rollupManaLimit', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, rollupManaLimit: specificMana })).not.toThrow(); + }); + + it('throws when checkpoint DA gas exceeds MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT', async () => { + const checkpoint = await makeCheckpoint(); + jest.spyOn(checkpoint.blocks[0], 'computeDAGasUsed').mockReturnValue(MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT + 1); + expect(() => validateCheckpoint(checkpoint, validOpts)).toThrow(CheckpointValidationError); + expect(() => validateCheckpoint(checkpoint, validOpts)).toThrow(/DA gas cost.*exceeds 
limit/); + }); + + it('throws when checkpoint blob field count exceeds limit', async () => { + const checkpoint = await makeCheckpoint(); + const maxBlobFields = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB; + jest.spyOn(checkpoint, 'toBlobFields').mockReturnValue(new Array(maxBlobFields + 1).fill(Fr.ZERO)); + expect(() => validateCheckpoint(checkpoint, validOpts)).toThrow(CheckpointValidationError); + expect(() => validateCheckpoint(checkpoint, validOpts)).toThrow(/blob field count.*exceeds limit/); + }); + + it('throws when a block L2 gas exceeds maxL2BlockGas', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxL2BlockGas: specificMana - 1 })).toThrow( + CheckpointValidationError, + ); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxL2BlockGas: specificMana - 1 })).toThrow( + /L2 gas used.*exceeding limit/, + ); + }); + + it('skips per-block L2 gas check when maxL2BlockGas is undefined', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxL2BlockGas: undefined })).not.toThrow(); + }); + + it('throws when a block DA gas exceeds maxDABlockGas', async () => { + const checkpoint = await makeCheckpoint(); + jest.spyOn(checkpoint.blocks[0], 'computeDAGasUsed').mockReturnValue(1000); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxDABlockGas: 999 })).toThrow( + CheckpointValidationError, + ); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxDABlockGas: 999 })).toThrow( + /DA gas used.*exceeding limit/, + ); + }); + + it('skips per-block DA gas check when maxDABlockGas is undefined', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxDABlockGas: undefined })).not.toThrow(); + }); +}); diff --git a/yarn-project/stdlib/src/checkpoint/validate.ts b/yarn-project/stdlib/src/checkpoint/validate.ts index 
a89d9409f189..1ceb9fa4c102 100644 --- a/yarn-project/stdlib/src/checkpoint/validate.ts +++ b/yarn-project/stdlib/src/checkpoint/validate.ts @@ -2,6 +2,7 @@ import { BLOBS_PER_CHECKPOINT, FIELDS_PER_BLOB, MAX_PROCESSABLE_DA_GAS_PER_CHECK import type { CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { sum } from '@aztec/foundation/collection'; +import { MAX_BLOCKS_PER_CHECKPOINT } from '../deserialization/index.js'; import type { Checkpoint } from './checkpoint.js'; export class CheckpointValidationError extends Error { @@ -17,6 +18,7 @@ export class CheckpointValidationError extends Error { /** * Validates a checkpoint. Throws a CheckpointValidationError if any validation fails. + * - Validates structural integrity (non-empty, block count, sequential numbers, archive chaining, slot consistency) * - Validates checkpoint blob field count against maxBlobFields limit * - Validates total L2 gas used by checkpoint blocks against the Rollup contract mana limit * - Validates total DA gas used by checkpoint blocks against MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT @@ -25,24 +27,113 @@ export class CheckpointValidationError extends Error { export function validateCheckpoint( checkpoint: Checkpoint, opts: { - rollupManaLimit: number; - maxL2BlockGas: number | undefined; - maxDABlockGas: number | undefined; + rollupManaLimit?: number; + maxL2BlockGas?: number; + maxDABlockGas?: number; + maxTxsPerCheckpoint?: number; + maxTxsPerBlock?: number; }, ): void { + validateCheckpointStructure(checkpoint); validateCheckpointLimits(checkpoint, opts); validateCheckpointBlocksGasLimits(checkpoint, opts); } +/** + * Validates structural integrity of a checkpoint. 
+ * - Non-empty block list + * - Block count within MAX_BLOCKS_PER_CHECKPOINT + * - Checkpoint slot matches the first block's slot + * - Checkpoint lastArchiveRoot matches the first block's lastArchive root + * - Sequential block numbers without gaps + * - Sequential indexWithinCheckpoint starting at 0 + * - Archive root chaining between consecutive blocks + * - Consistent slot number across all blocks + * - Global variables (slot, timestamp, coinbase, feeRecipient, gasFees) match checkpoint header for each block + */ +export function validateCheckpointStructure(checkpoint: Checkpoint): void { + const { blocks, number, slot } = checkpoint; + + if (blocks.length === 0) { + throw new CheckpointValidationError('Checkpoint has no blocks', number, slot); + } + + if (blocks.length > MAX_BLOCKS_PER_CHECKPOINT) { + throw new CheckpointValidationError( + `Checkpoint has ${blocks.length} blocks, exceeding limit of ${MAX_BLOCKS_PER_CHECKPOINT}`, + number, + slot, + ); + } + + const firstBlock = blocks[0]; + + if (!checkpoint.header.lastArchiveRoot.equals(firstBlock.header.lastArchive.root)) { + throw new CheckpointValidationError( + `Checkpoint lastArchiveRoot does not match first block's lastArchive root`, + number, + slot, + ); + } + + for (let i = 0; i < blocks.length; i++) { + const block = blocks[i]; + + if (block.indexWithinCheckpoint !== i) { + throw new CheckpointValidationError( + `Block at index ${i} has indexWithinCheckpoint ${block.indexWithinCheckpoint}, expected ${i}`, + number, + slot, + ); + } + + if (block.slot !== slot) { + throw new CheckpointValidationError( + `Block ${block.number} has slot ${block.slot}, expected ${slot} (all blocks must share the same slot)`, + number, + slot, + ); + } + + if (!checkpoint.header.matchesGlobalVariables(block.header.globalVariables)) { + throw new CheckpointValidationError( + `Block ${block.number} global variables (slot, timestamp, coinbase, feeRecipient, gasFees) do not match checkpoint header`, + number, + slot, + ); + 
} + + if (i > 0) { + const prev = blocks[i - 1]; + if (block.number !== prev.number + 1) { + throw new CheckpointValidationError( + `Block numbers are not sequential: block at index ${i - 1} has number ${prev.number}, block at index ${i} has number ${block.number}`, + number, + slot, + ); + } + + if (!block.header.lastArchive.root.equals(prev.archive.root)) { + throw new CheckpointValidationError( + `Block ${block.number} lastArchive root does not match archive root of block ${prev.number}`, + number, + slot, + ); + } + } + } +} + /** Validates checkpoint blocks gas limits */ function validateCheckpointBlocksGasLimits( checkpoint: Checkpoint, opts: { - maxL2BlockGas: number | undefined; - maxDABlockGas: number | undefined; + maxL2BlockGas?: number; + maxDABlockGas?: number; + maxTxsPerBlock?: number; }, ): void { - const { maxL2BlockGas, maxDABlockGas } = opts; + const { maxL2BlockGas, maxDABlockGas, maxTxsPerBlock } = opts; if (maxL2BlockGas !== undefined) { for (const block of checkpoint.blocks) { @@ -69,43 +160,68 @@ function validateCheckpointBlocksGasLimits( } } } + + if (maxTxsPerBlock !== undefined) { + for (const block of checkpoint.blocks) { + const blockTxCount = block.body.txEffects.length; + if (blockTxCount > maxTxsPerBlock) { + throw new CheckpointValidationError( + `Block ${block.number} in checkpoint has ${blockTxCount} txs exceeding limit of ${maxTxsPerBlock}`, + checkpoint.number, + checkpoint.slot, + ); + } + } + } } -/** Validates checkpoint max blob fields and gas limits */ +/** Validates checkpoint max blob fields, gas limits, and tx limits */ function validateCheckpointLimits( checkpoint: Checkpoint, opts: { - rollupManaLimit: number; + rollupManaLimit?: number; + maxTxsPerCheckpoint?: number; }, ): void { - const { rollupManaLimit } = opts; + const { rollupManaLimit, maxTxsPerCheckpoint } = opts; const maxBlobFields = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB; const maxDAGas = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; - const checkpointMana = 
sum(checkpoint.blocks.map(block => block.header.totalManaUsed.toNumber())); - if (checkpointMana > rollupManaLimit) { + if (rollupManaLimit !== undefined) { + const checkpointMana = sum(checkpoint.blocks.map(block => block.header.totalManaUsed.toNumber())); + if (checkpointMana > rollupManaLimit) { + throw new CheckpointValidationError( + `Checkpoint mana cost ${checkpointMana} exceeds rollup limit of ${rollupManaLimit}`, + checkpoint.number, + checkpoint.slot, + ); + } + } + + const checkpointDAGas = sum(checkpoint.blocks.map(block => block.computeDAGasUsed())); + if (checkpointDAGas > maxDAGas) { throw new CheckpointValidationError( - `Checkpoint mana cost ${checkpointMana} exceeds rollup limit of ${rollupManaLimit}`, + `Checkpoint DA gas cost ${checkpointDAGas} exceeds limit of ${maxDAGas}`, checkpoint.number, checkpoint.slot, ); } - const checkpointDAGas = sum(checkpoint.blocks.map(block => block.computeDAGasUsed())); - if (checkpointDAGas > maxDAGas) { + const checkpointBlobFields = checkpoint.toBlobFields().length; + if (checkpointBlobFields > maxBlobFields) { throw new CheckpointValidationError( - `Checkpoint DA gas cost ${checkpointDAGas} exceeds limit of ${maxDAGas}`, + `Checkpoint blob field count ${checkpointBlobFields} exceeds limit of ${maxBlobFields}`, checkpoint.number, checkpoint.slot, ); } - if (maxBlobFields !== undefined) { - const checkpointBlobFields = checkpoint.toBlobFields().length; - if (checkpointBlobFields > maxBlobFields) { + if (maxTxsPerCheckpoint !== undefined) { + const checkpointTxCount = sum(checkpoint.blocks.map(block => block.body.txEffects.length)); + if (checkpointTxCount > maxTxsPerCheckpoint) { throw new CheckpointValidationError( - `Checkpoint blob field count ${checkpointBlobFields} exceeds limit of ${maxBlobFields}`, + `Checkpoint tx count ${checkpointTxCount} exceeds limit of ${maxTxsPerCheckpoint}`, checkpoint.number, checkpoint.slot, ); diff --git a/yarn-project/stdlib/src/config/sequencer-config.ts 
b/yarn-project/stdlib/src/config/sequencer-config.ts index 31d0eca9458a..77bdfd94ed82 100644 --- a/yarn-project/stdlib/src/config/sequencer-config.ts +++ b/yarn-project/stdlib/src/config/sequencer-config.ts @@ -1,4 +1,4 @@ -import { type ConfigMappingsType, numberConfigHelper } from '@aztec/foundation/config'; +import type { ConfigMappingsType } from '@aztec/foundation/config'; import type { SequencerConfig } from '../interfaces/configs.js'; @@ -32,6 +32,6 @@ export const sharedSequencerConfigMappings: ConfigMappingsType< maxTxsPerBlock: { env: 'SEQ_MAX_TX_PER_BLOCK', description: 'The maximum number of txs to include in a block.', - ...numberConfigHelper(DEFAULT_MAX_TXS_PER_BLOCK), + parseEnv: (val: string) => (val ? parseInt(val, 10) : undefined), }, }; diff --git a/yarn-project/stdlib/src/interfaces/block-builder.ts b/yarn-project/stdlib/src/interfaces/block-builder.ts index 07a986ab2384..87ed1444fff5 100644 --- a/yarn-project/stdlib/src/interfaces/block-builder.ts +++ b/yarn-project/stdlib/src/interfaces/block-builder.ts @@ -61,6 +61,7 @@ export type FullNodeBlockBuilderConfig = Pick & { @@ -77,6 +78,7 @@ export const FullNodeBlockBuilderConfigKeys: (keyof FullNodeBlockBuilderConfig)[ 'fakeProcessingDelayPerTxMs', 'fakeThrowAfterProcessingTxCount', 'maxTxsPerBlock', + 'maxTxsPerCheckpoint', 'maxL2BlockGas', 'maxDABlockGas', 'rollupManaLimit', diff --git a/yarn-project/stdlib/src/interfaces/configs.ts b/yarn-project/stdlib/src/interfaces/configs.ts index bb18db1ee5fc..a290e0c69b0d 100644 --- a/yarn-project/stdlib/src/interfaces/configs.ts +++ b/yarn-project/stdlib/src/interfaces/configs.ts @@ -13,6 +13,8 @@ export interface SequencerConfig { sequencerPollingIntervalMS?: number; /** The maximum number of txs to include in a block. */ maxTxsPerBlock?: number; + /** The maximum number of txs across all blocks in a checkpoint. */ + maxTxsPerCheckpoint?: number; /** The minimum number of txs to include in a block. 
*/ minTxsPerBlock?: number; /** The minimum number of valid txs (after execution) to include in a block. If not set, falls back to minTxsPerBlock. */ @@ -85,6 +87,7 @@ export const SequencerConfigSchema = zodFor()( z.object({ sequencerPollingIntervalMS: z.number().optional(), maxTxsPerBlock: z.number().optional(), + maxTxsPerCheckpoint: z.number().optional(), minValidTxsPerBlock: z.number().optional(), minTxsPerBlock: z.number().optional(), maxL2BlockGas: z.number().optional(), @@ -135,6 +138,8 @@ type SequencerConfigOptionalKeys = | 'txPublicSetupAllowListExtend' | 'minValidTxsPerBlock' | 'minBlocksForCheckpoint' + | 'maxTxsPerBlock' + | 'maxTxsPerCheckpoint' | 'maxL2BlockGas' | 'maxDABlockGas' | 'gasPerBlockAllocationMultiplier'; diff --git a/yarn-project/stdlib/src/tests/mocks.ts b/yarn-project/stdlib/src/tests/mocks.ts index 79d33955c3d6..45b98431c5ca 100644 --- a/yarn-project/stdlib/src/tests/mocks.ts +++ b/yarn-project/stdlib/src/tests/mocks.ts @@ -427,10 +427,13 @@ export async function mockCheckpointAndMessages( Partial[1]> = {}, ) { const slotNumber = options.slotNumber ?? SlotNumber(Number(checkpointNumber) * 10); + const globals = GlobalVariables.random({ slotNumber, ...options }); const blocksAndMessages = []; + // Track the previous block's archive to ensure consecutive blocks have consistent archive roots. // The current block's header.lastArchive must equal the previous block's archive. let lastArchive: AppendOnlyTreeSnapshot | undefined = previousArchive; + // Pass maxEffects via txOptions so it reaches TxEffect.random const txOptions = maxEffects !== undefined ? { maxEffects } : {}; for (let i = 0; i < (blocks?.length ?? numBlocks); i++) { @@ -439,11 +442,11 @@ export async function mockCheckpointAndMessages( block: blocks?.[i] ?? 
(await L2Block.random(blockNumber, { + ...globals, checkpointNumber, indexWithinCheckpoint: IndexWithinCheckpoint(i), txsPerBlock: numTxsPerBlock, txOptions, - slotNumber, ...options, ...makeBlockOptions(blockNumber), ...(lastArchive ? { lastArchive } : {}), @@ -457,12 +460,18 @@ export async function mockCheckpointAndMessages( const messages = blocksAndMessages[0].messages; const inHash = computeInHashFromL1ToL2Messages(messages); - const checkpoint = await Checkpoint.random(checkpointNumber, { numBlocks: 0, slotNumber, inHash, ...options }); + const firstBlockLastArchive = blocksAndMessages[0].block.header.lastArchive; + const checkpoint = await Checkpoint.random(checkpointNumber, { + numBlocks: 0, + inHash, + ...options, + ...globals, + lastArchive: firstBlockLastArchive, + lastArchiveRoot: firstBlockLastArchive.root, + archive: lastArchive, + }); + checkpoint.blocks = blocksAndMessages.map(({ block }) => block); - // Set the checkpoint's archive to match the last block's archive for proper chaining. - // When the archiver reconstructs checkpoints from L1, it uses the checkpoint's archive root - // from the L1 event to set the last block's archive. Without this, the archive chain breaks. - checkpoint.archive = lastArchive!; // Return lastArchive so callers can chain it across multiple checkpoints return { checkpoint, messages, lastArchive }; diff --git a/yarn-project/stdlib/src/tx/block_header.ts b/yarn-project/stdlib/src/tx/block_header.ts index 1a1457e0f96e..6788df00ca98 100644 --- a/yarn-project/stdlib/src/tx/block_header.ts +++ b/yarn-project/stdlib/src/tx/block_header.ts @@ -176,6 +176,12 @@ export class BlockHeader { this._cachedHash = Promise.resolve(new BlockHash(hashed)); } + /** Recomputes the cached hash. Used for testing when header fields are mutated via unfreeze. 
*/ + recomputeHash(): Promise { + this._cachedHash = undefined; + return this.hash(); + } + static random(overrides: Partial> & Partial> = {}): BlockHeader { return BlockHeader.from({ lastArchive: AppendOnlyTreeSnapshot.random(), diff --git a/yarn-project/validator-client/README.md b/yarn-project/validator-client/README.md index 9c25164b0c7c..c6891987fd6f 100644 --- a/yarn-project/validator-client/README.md +++ b/yarn-project/validator-client/README.md @@ -239,11 +239,11 @@ L1 enforces gas and blob capacity per checkpoint. The node enforces these during Per-block budgets prevent one block from consuming the entire checkpoint budget. -**Proposer**: `SequencerClient.computeBlockGasLimits()` derives budgets at startup as `min(checkpointLimit, ceil(checkpointLimit / maxBlocks * multiplier))`, where `maxBlocks` comes from the timetable and `multiplier` defaults to 2. The multiplier greater than 1 allows early blocks to use more than their even share of the checkpoint budget, since different blocks hit different limit dimensions (L2 gas, DA gas, blob fields) — a strict even split would waste capacity. Operators can override via `SEQ_MAX_L2_BLOCK_GAS` / `SEQ_MAX_DA_BLOCK_GAS` (capped at checkpoint limits). +**Proposer**: `computeBlockLimits()` derives budgets at startup as `min(checkpointLimit, ceil(checkpointLimit / maxBlocks * multiplier))`, where `maxBlocks` comes from the timetable and `multiplier` defaults to 2. The multiplier greater than 1 allows early blocks to use more than their even share of the checkpoint budget, since different blocks hit different limit dimensions (L2 gas, DA gas, blob fields) — a strict even split would waste capacity. Operators can override via `SEQ_MAX_L2_BLOCK_GAS` / `SEQ_MAX_DA_BLOCK_GAS` / `SEQ_MAX_TX_PER_BLOCK` (capped at checkpoint limits). Per-block TX limits follow the same derivation pattern when `SEQ_MAX_TX_PER_CHECKPOINT` is set. **Validator**: Does not enforce per-block gas budgets. 
Only checkpoint-level limits are checked, so that proposers can freely distribute capacity across blocks within a checkpoint. -**Checkpoint-level capping**: `CheckpointBuilder.capLimitsByCheckpointBudgets()` always runs before tx processing, capping per-block limits by `checkpointBudget - sum(used by prior blocks)` for all three dimensions. This applies to both proposer and validator paths. +**Checkpoint-level capping**: `CheckpointBuilder.capLimitsByCheckpointBudgets()` always runs before tx processing, capping per-block limits by `checkpointBudget - sum(used by prior blocks)` for all three gas dimensions and for transaction count (when `SEQ_MAX_TX_PER_CHECKPOINT` is set). This applies to both proposer and validator paths. ### Per-transaction enforcement @@ -257,6 +257,8 @@ Per-block budgets prevent one block from consuming the entire checkpoint budget. | --- | --- | --- | | `SEQ_MAX_L2_BLOCK_GAS` | *auto* | Per-block L2 gas. Auto-derived from `rollupManaLimit / maxBlocks * multiplier`. | | `SEQ_MAX_DA_BLOCK_GAS` | *auto* | Per-block DA gas. Auto-derived from checkpoint DA limit / maxBlocks * multiplier. | +| `SEQ_MAX_TX_PER_BLOCK` | *none* | Per-block tx count. If `SEQ_MAX_TX_PER_CHECKPOINT` is set and per-block is not, derived as `ceil(checkpointLimit / maxBlocks * multiplier)`. | +| `SEQ_MAX_TX_PER_CHECKPOINT` | *none* | Total txs across all blocks in a checkpoint. When set, per-block tx limit is derived from it (unless explicitly overridden) and checkpoint-level capping is enforced. | | `SEQ_GAS_PER_BLOCK_ALLOCATION_MULTIPLIER` | 2 | Multiplier for per-block budget computation. 
| ## Testing Patterns diff --git a/yarn-project/validator-client/src/checkpoint_builder.test.ts b/yarn-project/validator-client/src/checkpoint_builder.test.ts index 19f307fa5a0b..7f2c640183dc 100644 --- a/yarn-project/validator-client/src/checkpoint_builder.test.ts +++ b/yarn-project/validator-client/src/checkpoint_builder.test.ts @@ -345,5 +345,75 @@ describe('CheckpointBuilder', () => { expect(afterTwoBlocks.maxBlobFields).toBeLessThan(afterOneBlock.maxBlobFields!); expect(afterOneBlock.maxBlobFields! - afterTwoBlocks.maxBlobFields!).toBe(block2BlobFieldCount); }); + + it('caps transaction count by remaining checkpoint tx budget', () => { + setupBuilder({ maxTxsPerCheckpoint: 20 }); + + // Prior block with 3 txs (each with 10 blob fields) + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 0, txBlobFields: [10, 10, 10], blockBlobFieldCount: 40 }), + ]); + + const opts: PublicProcessorLimits = { maxTransactions: 15 }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining txs = 20 - 3 = 17. Per-block = 15. Capped to min(15, 17) = 15. + expect(capped.maxTransactions).toBe(15); + }); + + it('caps transaction count when remaining budget is smaller than per-block limit', () => { + setupBuilder({ maxTxsPerCheckpoint: 10 }); + + // Two prior blocks with 4 txs each = 8 total + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 0, txBlobFields: [10, 10, 10, 10], blockBlobFieldCount: 50 }), + createMockBlock({ manaUsed: 0, txBlobFields: [10, 10, 10, 10], blockBlobFieldCount: 50 }), + ]); + + const opts: PublicProcessorLimits = { maxTransactions: 5 }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining txs = 10 - 8 = 2. Per-block = 5. Capped to min(5, 2) = 2. 
+ expect(capped.maxTransactions).toBe(2); + }); + + it('sets transaction count from remaining budget when caller does not provide it', () => { + setupBuilder({ maxTxsPerCheckpoint: 15 }); + + // Prior block with 5 txs + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 0, txBlobFields: [10, 10, 10, 10, 10], blockBlobFieldCount: 60 }), + ]); + + const opts: PublicProcessorLimits = {}; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining txs = 15 - 5 = 10 + expect(capped.maxTransactions).toBe(10); + }); + + it('does not cap transaction count when maxTxsPerCheckpoint is not set', () => { + setupBuilder(); // no maxTxsPerCheckpoint + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); + + const opts: PublicProcessorLimits = { maxTransactions: 99 }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Passthrough: maxTransactions = 99 + expect(capped.maxTransactions).toBe(99); + }); + + it('does not cap transaction count when maxTxsPerCheckpoint is not set and caller does not provide it', () => { + setupBuilder(); // no maxTxsPerCheckpoint + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); + + const opts: PublicProcessorLimits = {}; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Neither config nor caller sets it, so it remains undefined + expect(capped.maxTransactions).toBeUndefined(); + }); }); }); diff --git a/yarn-project/validator-client/src/checkpoint_builder.ts b/yarn-project/validator-client/src/checkpoint_builder.ts index 3c387b1da257..8448ff422bf7 100644 --- a/yarn-project/validator-client/src/checkpoint_builder.ts +++ b/yarn-project/validator-client/src/checkpoint_builder.ts @@ -158,7 +158,7 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { */ protected capLimitsByCheckpointBudgets( opts: PublicProcessorLimits, - ): Pick { + ): Pick { const existingBlocks 
= this.checkpointBuilder.getBlocks(); // Remaining L2 gas (mana) @@ -188,9 +188,21 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { const cappedBlobFields = opts.maxBlobFields !== undefined ? Math.min(opts.maxBlobFields, maxBlobFieldsForTxs) : maxBlobFieldsForTxs; + // Cap transaction count by remaining checkpoint tx budget + let cappedMaxTransactions: number | undefined; + if (this.config.maxTxsPerCheckpoint !== undefined) { + const usedTxs = sum(existingBlocks.map(b => b.body.txEffects.length)); + const remainingTxs = Math.max(0, this.config.maxTxsPerCheckpoint - usedTxs); + cappedMaxTransactions = + opts.maxTransactions !== undefined ? Math.min(opts.maxTransactions, remainingTxs) : remainingTxs; + } else { + cappedMaxTransactions = opts.maxTransactions; + } + return { maxBlockGas: new Gas(cappedDAGas, cappedL2Gas), maxBlobFields: cappedBlobFields, + maxTransactions: cappedMaxTransactions, }; } diff --git a/yarn-project/validator-client/src/validator.ha.integration.test.ts b/yarn-project/validator-client/src/validator.ha.integration.test.ts index cba52926ec05..80c7bd532974 100644 --- a/yarn-project/validator-client/src/validator.ha.integration.test.ts +++ b/yarn-project/validator-client/src/validator.ha.integration.test.ts @@ -193,6 +193,7 @@ describe('ValidatorClient HA Integration', () => { const metrics = new ValidatorMetrics(getTelemetryClient()); const blockProposalValidator = new BlockProposalValidator(epochCache, { txsPermitted: true, + maxTxsPerBlock: undefined, }); const blockProposalHandler = new BlockProposalHandler( checkpointsBuilder, From 87f196cad80b2f90eeadd70aa5fb4b4e0e7ef721 Mon Sep 17 00:00:00 2001 From: AztecBot <49558828+AztecBot@users.noreply.github.com> Date: Tue, 3 Mar 2026 23:45:11 +0000 Subject: [PATCH 14/37] fix: drop --pid=host from docker_isolate `--pid=host` no longer needed, and was a bit suss Drop some flakes Co-authored-by: ludamad --- .test_patterns.yml | 21 --------------------- ci3/docker_isolate | 2 -- 2 
files changed, 23 deletions(-) diff --git a/.test_patterns.yml b/.test_patterns.yml index e857f75b878a..6eb021c46794 100644 --- a/.test_patterns.yml +++ b/.test_patterns.yml @@ -56,27 +56,6 @@ tests: error_regex: "field_t::range_constraint" owners: - *luke - - regex: "barretenberg/acir_tests/scripts/browser_prove.sh" - error_regex: "Failed to fetch" - owners: - - *adam - - regex: "barretenberg/acir_tests/scripts/browser_prove.sh" - error_regex: "RuntimeError: Out of bounds memory access" - owners: - - *adam - - regex: "barretenberg/acir_tests/scripts/browser_prove.sh" - error_regex: "call_indirect to a null table entry" - owners: - - *adam - - regex: "barretenberg/acir_tests/scripts/browser_prove.sh" - error_regex: "Input is not large enough" - owners: - - *adam - # https://gist.github.com/spalladino/4fd3d2abd7b7fb05be2e556649868626 - - regex: "barretenberg/acir_tests/scripts/browser_prove.sh" - error_regex: "sending signal TERM to command" - owners: - - *adam - regex: "barretenberg/cpp/scripts/run_bench.sh wasm bb-micro-bench/wasm/ultra_honk" error_regex: "Aborted.*core dumped" owners: diff --git a/ci3/docker_isolate b/ci3/docker_isolate index 07fd643546a4..8628d499e9e2 100755 --- a/ci3/docker_isolate +++ b/ci3/docker_isolate @@ -55,7 +55,6 @@ fi # Launch the container in the background. # Don't launch in the foreground or you can't process SIGINT/SIGTERM. # Don't use & as we want to block, and be sure it starts before processing any signals. -# We use --pid=host so that we can rely on $$ in bash scripts being (temporally) unique. 
set -x cid=$(docker run -d \ ${name_arg:-} \ @@ -63,7 +62,6 @@ cid=$(docker run -d \ ${cpuset_arg:-} \ --cpus=$CPUS \ --memory=$MEM \ - --pid=host \ --user $(id -u):$(id -g) \ -v$HOME:$HOME \ $tmp_mount \ From 22bcddb0564f33d103a614624cd5c2a8ad17eb54 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Wed, 4 Mar 2026 00:56:49 +0000 Subject: [PATCH 15/37] fix: use mktemp for output dirs in acir_tests scripts Replace output-$$ with mktemp -d to avoid PID collisions across docker containers sharing $HOME (without --pid=host, containers get low PIDs like 7, causing output-7 directory collisions). --- barretenberg/acir_tests/scripts/bb_prove.sh | 24 +++++++++---------- .../scripts/bb_prove_bbjs_verify.sh | 12 +++++----- .../acir_tests/scripts/bb_prove_sol_verify.sh | 18 +++++++------- .../scripts/bbjs_legacy_cli_prove.sh | 12 +++++----- barretenberg/acir_tests/scripts/bbjs_prove.sh | 8 +++---- .../scripts/bbjs_prove_bb_verify.sh | 12 +++++----- .../scripts/bbjs_prove_sol_verify.sh | 14 +++++------ 7 files changed, 50 insertions(+), 50 deletions(-) diff --git a/barretenberg/acir_tests/scripts/bb_prove.sh b/barretenberg/acir_tests/scripts/bb_prove.sh index 04097fa341b7..0dbe67def63a 100755 --- a/barretenberg/acir_tests/scripts/bb_prove.sh +++ b/barretenberg/acir_tests/scripts/bb_prove.sh @@ -12,31 +12,31 @@ shift # Base flags + our commandline args flags="-v --scheme ultra_honk $*" -mkdir -p output-$$ -trap "rm -rf output-$$" EXIT +output_dir=$(mktemp -d ./output-XXXXXX) +trap "rm -rf $output_dir" EXIT # Tests prefixed with failing_ are expected to fail. 
if [[ $test_name == failing_* ]]; then - if $bb write_vk $flags -b target/program.json -o output-$$ && \ - $bb prove $flags -b target/program.json -k output-$$/vk -o output-$$ && \ + if $bb write_vk $flags -b target/program.json -o $output_dir && \ + $bb prove $flags -b target/program.json -k $output_dir/vk -o $output_dir && \ $bb verify $flags \ - -k output-$$/vk \ - -p output-$$/proof \ - -i output-$$/public_inputs; then + -k $output_dir/vk \ + -p $output_dir/proof \ + -i $output_dir/public_inputs; then echo "ERROR: Expected test '$test_name' to fail, but it passed!" exit 1 fi else # Generate VK - $bb write_vk $flags -b target/program.json -o output-$$ + $bb write_vk $flags -b target/program.json -o $output_dir # Prove - $bb prove $flags -b target/program.json -k output-$$/vk -o output-$$ + $bb prove $flags -b target/program.json -k $output_dir/vk -o $output_dir # Verify $bb verify $flags \ - -k output-$$/vk \ - -p output-$$/proof \ - -i output-$$/public_inputs + -k $output_dir/vk \ + -p $output_dir/proof \ + -i $output_dir/public_inputs fi diff --git a/barretenberg/acir_tests/scripts/bb_prove_bbjs_verify.sh b/barretenberg/acir_tests/scripts/bb_prove_bbjs_verify.sh index 9b8b1eb2139d..5b8efc4a1121 100755 --- a/barretenberg/acir_tests/scripts/bb_prove_bbjs_verify.sh +++ b/barretenberg/acir_tests/scripts/bb_prove_bbjs_verify.sh @@ -7,23 +7,23 @@ export HARDWARE_CONCURRENCY=8 bb=$(../../../cpp/scripts/find-bb) -mkdir -p output-$$ -trap "rm -rf output-$$" EXIT +output_dir=$(mktemp -d ./output-XXXXXX) +trap "rm -rf $output_dir" EXIT # Generate the VK using BB CLI $bb write_vk \ --scheme ultra_honk \ -b target/program.json \ - -o output-$$ + -o $output_dir # Generate the proof using BB CLI (save as both bytes and fields) $bb prove \ --scheme ultra_honk \ -b target/program.json \ -w target/witness.gz \ - -k output-$$/vk \ - -o output-$$ + -k $output_dir/vk \ + -o $output_dir # Verify the proof with bb.js classes node ../../bbjs-test verify \ - -d output-$$ + -d 
$output_dir diff --git a/barretenberg/acir_tests/scripts/bb_prove_sol_verify.sh b/barretenberg/acir_tests/scripts/bb_prove_sol_verify.sh index c33a4bf988fe..3abf835ad78d 100755 --- a/barretenberg/acir_tests/scripts/bb_prove_sol_verify.sh +++ b/barretenberg/acir_tests/scripts/bb_prove_sol_verify.sh @@ -30,21 +30,21 @@ else has_zk="true" fi -mkdir -p output-$$ -trap "rm -rf output-$$" EXIT +output_dir=$(mktemp -d ./output-XXXXXX) +trap "rm -rf $output_dir" EXIT # Create a proof, write the solidity contract, write the proof as fields in order to extract the public inputs -$bb prove $flags -b target/program.json --oracle_hash keccak --write_vk -o output-$$ -$bb verify $flags --oracle_hash keccak -i output-$$/public_inputs -k output-$$/vk -p output-$$/proof -$bb write_solidity_verifier $write_contract_flags -k output-$$/vk -o output-$$/Verifier.sol +$bb prove $flags -b target/program.json --oracle_hash keccak --write_vk -o $output_dir +$bb verify $flags --oracle_hash keccak -i $output_dir/public_inputs -k $output_dir/vk -p $output_dir/proof +$bb write_solidity_verifier $write_contract_flags -k $output_dir/vk -o $output_dir/Verifier.sol # Use solcjs to compile the generated key contract with the template verifier and test contract # index.js will start an anvil, on a random port # Deploy the verifier then send a test transaction -PROOF="output-$$/proof" \ -PUBLIC_INPUTS="output-$$/public_inputs" \ -VERIFIER_PATH="output-$$/Verifier.sol" \ +PROOF="$output_dir/proof" \ +PUBLIC_INPUTS="$output_dir/public_inputs" \ +VERIFIER_PATH="$output_dir/Verifier.sol" \ TEST_PATH="../../sol-test/HonkTest.sol" \ HAS_ZK="$has_zk" \ -TEST_NAME=$(basename output-$$) \ +TEST_NAME=$(basename $output_dir) \ node ../../sol-test/src/index.js diff --git a/barretenberg/acir_tests/scripts/bbjs_legacy_cli_prove.sh b/barretenberg/acir_tests/scripts/bbjs_legacy_cli_prove.sh index e9fa668f75ac..0fa48eb91755 100755 --- a/barretenberg/acir_tests/scripts/bbjs_legacy_cli_prove.sh +++ 
b/barretenberg/acir_tests/scripts/bbjs_legacy_cli_prove.sh @@ -8,16 +8,16 @@ cd ../acir_tests/$1 # NOTE The bb.js main file is deprecated! bbjs_bin="../../../ts/dest/node/main.js" -mkdir -p output-$$ -trap "rm -rf output-$$" EXIT +output_dir=$(mktemp -d ./output-XXXXXX) +trap "rm -rf $output_dir" EXIT # Generate VK -node $bbjs_bin write_vk_ultra_honk -v -b target/program.json -o output-$$/vk +node $bbjs_bin write_vk_ultra_honk -v -b target/program.json -o $output_dir/vk # Prove -node $bbjs_bin prove_ultra_honk -o output-$$/proof -v -b target/program.json -k output-$$/vk +node $bbjs_bin prove_ultra_honk -o $output_dir/proof -v -b target/program.json -k $output_dir/vk # Verify node $bbjs_bin verify_ultra_honk -v \ - -k output-$$/vk \ - -p output-$$/proof + -k $output_dir/vk \ + -p $output_dir/proof diff --git a/barretenberg/acir_tests/scripts/bbjs_prove.sh b/barretenberg/acir_tests/scripts/bbjs_prove.sh index d67f4330909d..6e382a484d80 100755 --- a/barretenberg/acir_tests/scripts/bbjs_prove.sh +++ b/barretenberg/acir_tests/scripts/bbjs_prove.sh @@ -5,16 +5,16 @@ cd ../acir_tests/$1 export HARDWARE_CONCURRENCY=8 -mkdir -p output-$$ -trap "rm -rf output-$$" EXIT +output_dir=$(mktemp -d ./output-XXXXXX) +trap "rm -rf $output_dir" EXIT # Writes the proof, public inputs ./target; this also writes the VK node ../../bbjs-test prove \ -b target/program.json \ -w target/witness.gz \ - -o output-$$ \ + -o $output_dir \ --multi-threaded # Verify the proof by reading the files in ./target node ../../bbjs-test verify \ - -d output-$$ + -d $output_dir diff --git a/barretenberg/acir_tests/scripts/bbjs_prove_bb_verify.sh b/barretenberg/acir_tests/scripts/bbjs_prove_bb_verify.sh index b809adf477dc..700f94ade259 100755 --- a/barretenberg/acir_tests/scripts/bbjs_prove_bb_verify.sh +++ b/barretenberg/acir_tests/scripts/bbjs_prove_bb_verify.sh @@ -5,14 +5,14 @@ export HARDWARE_CONCURRENCY=8 cd ../acir_tests/$1 -mkdir -p output-$$ -trap "rm -rf output-$$" EXIT +output_dir=$(mktemp -d 
./output-XXXXXX) +trap "rm -rf $output_dir" EXIT # Writes the proof, public inputs ./target; this also writes the VK node ../../bbjs-test prove \ -b target/program.json \ -w target/witness.gz \ - -o output-$$ + -o $output_dir # The proof and public_inputs are already in binary format from bbjs-test @@ -20,6 +20,6 @@ bb=$(../../../cpp/scripts/find-bb) # Verify the proof with bb cli $bb verify \ --scheme ultra_honk \ - -k output-$$/vk \ - -p output-$$/proof \ - -i output-$$/public_inputs + -k $output_dir/vk \ + -p $output_dir/proof \ + -i $output_dir/public_inputs diff --git a/barretenberg/acir_tests/scripts/bbjs_prove_sol_verify.sh b/barretenberg/acir_tests/scripts/bbjs_prove_sol_verify.sh index 1574b00cbb97..70e455697363 100755 --- a/barretenberg/acir_tests/scripts/bbjs_prove_sol_verify.sh +++ b/barretenberg/acir_tests/scripts/bbjs_prove_sol_verify.sh @@ -5,14 +5,14 @@ export HARDWARE_CONCURRENCY=8 cd ../acir_tests/$1 -mkdir -p output-$$ -trap "rm -rf output-$$" EXIT +output_dir=$(mktemp -d ./output-XXXXXX) +trap "rm -rf $output_dir" EXIT # Generate the proof and VK node ../../bbjs-test prove \ -b target/program.json \ -w target/witness.gz \ - -o output-$$ \ + -o $output_dir \ --oracle-hash $oracle_hash bb=$(../../../cpp/scripts/find-bb) @@ -31,12 +31,12 @@ for arg in "$@"; do done # Use the BB CLI to write the solidity verifier - this can also be done with bb.js -$bb write_solidity_verifier --scheme ultra_honk -k output-$$/vk -o output-$$/Verifier.sol +$bb write_solidity_verifier --scheme ultra_honk -k $output_dir/vk -o $output_dir/Verifier.sol # Verify the proof using the solidity verifier -PROOF="output-$$/proof" \ -PUBLIC_INPUTS="output-$$/public_inputs" \ -VERIFIER_PATH="output-$$/Verifier.sol" \ +PROOF="$output_dir/proof" \ +PUBLIC_INPUTS="$output_dir/public_inputs" \ +VERIFIER_PATH="$output_dir/Verifier.sol" \ TEST_PATH="../../sol-test/HonkTest.sol" \ HAS_ZK="$has_zk" \ TEST_NAME=$(basename $(realpath .)) \ From 928846b2b919ef0e06009c059c5855064023c239 Mon 
Sep 17 00:00:00 2001 From: Michal Rzeszutko Date: Wed, 4 Mar 2026 13:31:54 +0100 Subject: [PATCH 16/37] feat: standby mode for prover broker (#21098) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary - Extracts the node's rollup standby logic into a shared module (`standby.ts`) so both the node and the prover broker can wait for a compatible L1 rollup before starting - Adds standby mode to the prover broker startup — previously it would crash on `getL1Config()` if the canonical rollup was incompatible (e.g. during L1 contract upgrades) - Reduces the standby poll interval from 600s to 60s and replaces the manual `while(true) + sleep` loop with `retryUntil` from `@aztec/foundation/retry` (both suggested as review feedback on #20937) ## Changes **New: `aztec/src/cli/cmds/standby.ts`** - `computeExpectedGenesisRoot(config, userLog)` — computes genesis archive root from test accounts, sponsored FPC, and prefund addresses - `waitForCompatibleRollup(config, expectedGenesisRoot, port, userLog)` — checks L1 rollup genesis root, enters standby with a K8s health server if mismatch, polls every 60s via `retryUntil` **Modified: `aztec/src/cli/cmds/start_node.ts`** - Removed inline `waitForCompatibleRollup` function and genesis computation logic - Now imports and calls the shared functions from `standby.ts` - Cleaned up unused imports (`RegistryContract`, `RollupContract`, `sleep`, `Koa`, etc.) 
**Modified: `aztec/src/cli/cmds/start_prover_broker.ts`** - Added `computeExpectedGenesisRoot` + `waitForCompatibleRollup` calls before `getL1Config()`, matching the node's pattern Fixes A-608 --- yarn-project/aztec/src/cli/cmds/standby.ts | 111 ++++++++++++++++++ yarn-project/aztec/src/cli/cmds/start_node.ts | 101 +--------------- .../aztec/src/cli/cmds/start_prover_broker.ts | 6 + 3 files changed, 122 insertions(+), 96 deletions(-) create mode 100644 yarn-project/aztec/src/cli/cmds/standby.ts diff --git a/yarn-project/aztec/src/cli/cmds/standby.ts b/yarn-project/aztec/src/cli/cmds/standby.ts new file mode 100644 index 000000000000..c94605c16d80 --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/standby.ts @@ -0,0 +1,111 @@ +import { getInitialTestAccountsData } from '@aztec/accounts/testing'; +import type { Fr } from '@aztec/aztec.js/fields'; +import { getSponsoredFPCAddress } from '@aztec/cli/cli-utils'; +import { getPublicClient } from '@aztec/ethereum/client'; +import type { GenesisStateConfig } from '@aztec/ethereum/config'; +import { RegistryContract, RollupContract } from '@aztec/ethereum/contracts'; +import type { EthAddress } from '@aztec/foundation/eth-address'; +import { startHttpRpcServer } from '@aztec/foundation/json-rpc/server'; +import type { LogFn } from '@aztec/foundation/log'; +import { retryUntil } from '@aztec/foundation/retry'; +import { AztecAddress } from '@aztec/stdlib/aztec-address'; +import { getGenesisValues } from '@aztec/world-state/testing'; + +import Koa from 'koa'; + +const ROLLUP_POLL_INTERVAL_S = 60; + +/** + * Computes the expected genesis archive root from the genesis state config. + * Reads test accounts and sponsored FPC addresses as specified, then computes + * the genesis values including the archive root and prefilled public data. + */ +export async function computeExpectedGenesisRoot(config: GenesisStateConfig, userLog: LogFn) { + const testAccounts = config.testAccounts ? 
(await getInitialTestAccountsData()).map(a => a.address) : []; + const sponsoredFPCAccounts = config.sponsoredFPC ? [await getSponsoredFPCAddress()] : []; + const prefundAddresses = (config.prefundAddresses ?? []).map(a => AztecAddress.fromString(a)); + const initialFundedAccounts = testAccounts.concat(sponsoredFPCAccounts).concat(prefundAddresses); + + userLog(`Initial funded accounts: ${initialFundedAccounts.map(a => a.toString()).join(', ')}`); + + const { genesisArchiveRoot, prefilledPublicData } = await getGenesisValues(initialFundedAccounts); + + userLog(`Genesis archive root: ${genesisArchiveRoot.toString()}`); + + return { genesisArchiveRoot, prefilledPublicData }; +} + +/** + * Waits until the canonical rollup's genesis archive root matches the expected local genesis root. + * If the rollup is not yet compatible (e.g. during L1 contract upgrades), enters standby mode: + * starts a lightweight HTTP server for K8s liveness probes and polls every 60s until a compatible rollup appears. + */ +export async function waitForCompatibleRollup( + config: { + l1RpcUrls: string[]; + l1ChainId: number; + l1Contracts: { registryAddress: EthAddress }; + rollupVersion?: number; + }, + expectedGenesisRoot: Fr, + port: number | undefined, + userLog: LogFn, +): Promise { + const publicClient = getPublicClient(config); + const rollupVersion: number | 'canonical' = config.rollupVersion ?? 
'canonical'; + + const registry = new RegistryContract(publicClient, config.l1Contracts.registryAddress); + const rollupAddress = await registry.getRollupAddress(rollupVersion); + const rollup = new RollupContract(publicClient, rollupAddress.toString()); + + let l1GenesisRoot: Fr; + try { + l1GenesisRoot = await rollup.getGenesisArchiveTreeRoot(); + } catch (err: any) { + throw new Error( + `Could not retrieve genesis archive root from canonical rollup at ${rollupAddress}: ${err.message}`, + ); + } + + if (l1GenesisRoot.equals(expectedGenesisRoot)) { + return; + } + + userLog( + `Genesis root mismatch: expected ${expectedGenesisRoot}, got ${l1GenesisRoot} from rollup at ${rollupAddress}. ` + + `Entering standby mode. Will poll every ${ROLLUP_POLL_INTERVAL_S}s for a compatible rollup...`, + ); + + const standbyServer = await startHttpRpcServer({ getApp: () => new Koa(), isHealthy: () => true }, { port }); + userLog(`Standby status server listening on port ${standbyServer.port}`); + + try { + await retryUntil( + async () => { + const currentRollupAddress = await registry.getRollupAddress(rollupVersion); + const currentRollup = new RollupContract(publicClient, currentRollupAddress.toString()); + + let currentGenesisRoot: Fr; + try { + currentGenesisRoot = await currentRollup.getGenesisArchiveTreeRoot(); + } catch { + userLog(`Failed to fetch genesis root from rollup at ${currentRollupAddress}. Retrying...`); + return undefined; + } + + if (currentGenesisRoot.equals(expectedGenesisRoot)) { + userLog(`Compatible rollup found at ${currentRollupAddress}. Exiting standby mode.`); + return true; + } + + userLog(`Still waiting. Rollup at ${currentRollupAddress} has genesis root ${currentGenesisRoot}.`); + return undefined; + }, + 'compatible rollup', + 0, + ROLLUP_POLL_INTERVAL_S, + ); + } finally { + await new Promise((resolve, reject) => standbyServer.close(err => (err ? 
reject(err) : resolve()))); + } +} diff --git a/yarn-project/aztec/src/cli/cmds/start_node.ts b/yarn-project/aztec/src/cli/cmds/start_node.ts index 21839223820f..f84cd10284be 100644 --- a/yarn-project/aztec/src/cli/cmds/start_node.ts +++ b/yarn-project/aztec/src/cli/cmds/start_node.ts @@ -1,20 +1,14 @@ -import { getInitialTestAccountsData } from '@aztec/accounts/testing'; import { type AztecNodeConfig, aztecNodeConfigMappings, getConfigEnvVars } from '@aztec/aztec-node'; import { Fr } from '@aztec/aztec.js/fields'; -import { getSponsoredFPCAddress } from '@aztec/cli/cli-utils'; import { getL1Config } from '@aztec/cli/config'; import { getPublicClient } from '@aztec/ethereum/client'; -import { RegistryContract, RollupContract } from '@aztec/ethereum/contracts'; +import { getGenesisStateConfigEnvVars } from '@aztec/ethereum/config'; import { type NetworkNames, SecretValue } from '@aztec/foundation/config'; -import type { EthAddress } from '@aztec/foundation/eth-address'; import type { NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; -import { startHttpRpcServer } from '@aztec/foundation/json-rpc/server'; import { Agent, makeUndiciFetch } from '@aztec/foundation/json-rpc/undici'; import type { LogFn } from '@aztec/foundation/log'; -import { sleep } from '@aztec/foundation/sleep'; import { ProvingJobConsumerSchema, createProvingJobBrokerClient } from '@aztec/prover-client/broker'; import { type CliPXEOptions, type PXEConfig, allPxeConfigMappings } from '@aztec/pxe/config'; -import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { AztecNodeAdminApiSchema, AztecNodeApiSchema } from '@aztec/stdlib/interfaces/client'; import { P2PApiSchema, ProverNodeApiSchema, type ProvingJobBroker } from '@aztec/stdlib/interfaces/server'; import { @@ -24,9 +18,6 @@ import { telemetryClientConfigMappings, } from '@aztec/telemetry-client'; import { EmbeddedWallet } from '@aztec/wallets/embedded'; -import { getGenesisValues } from '@aztec/world-state/testing'; - 
-import Koa from 'koa'; import { createAztecNode } from '../../local-network/index.js'; import { @@ -36,74 +27,9 @@ import { setupVersionChecker, } from '../util.js'; import { getVersions } from '../versioning.js'; +import { computeExpectedGenesisRoot, waitForCompatibleRollup } from './standby.js'; import { startProverBroker } from './start_prover_broker.js'; -const ROLLUP_POLL_INTERVAL_MS = 600_000; - -/** - * Waits until the canonical rollup's genesis archive root matches the expected local genesis root. - * If the rollup is not yet compatible (e.g. during L1 contract upgrades), enters standby mode: - * starts a lightweight HTTP server for K8s liveness probes and polls until a compatible rollup appears. - */ -async function waitForCompatibleRollup( - publicClient: ReturnType, - registryAddress: EthAddress, - rollupVersion: number | 'canonical', - expectedGenesisRoot: Fr, - port: number | undefined, - userLog: LogFn, -): Promise { - const registry = new RegistryContract(publicClient, registryAddress); - const rollupAddress = await registry.getRollupAddress(rollupVersion); - const rollup = new RollupContract(publicClient, rollupAddress.toString()); - - let l1GenesisRoot: Fr; - try { - l1GenesisRoot = await rollup.getGenesisArchiveTreeRoot(); - } catch (err: any) { - throw new Error( - `Could not retrieve genesis archive root from canonical rollup at ${rollupAddress}: ${err.message}`, - ); - } - - if (l1GenesisRoot.equals(expectedGenesisRoot)) { - return; - } - - userLog( - `Genesis root mismatch: expected ${expectedGenesisRoot}, got ${l1GenesisRoot} from rollup at ${rollupAddress}. ` + - `Entering standby mode. 
Will poll every ${ROLLUP_POLL_INTERVAL_MS / 1000}s for a compatible rollup...`, - ); - - const standbyServer = await startHttpRpcServer({ getApp: () => new Koa(), isHealthy: () => true }, { port }); - userLog(`Standby status server listening on port ${standbyServer.port}`); - - try { - while (true) { - await sleep(ROLLUP_POLL_INTERVAL_MS); - - const currentRollupAddress = await registry.getRollupAddress(rollupVersion); - const currentRollup = new RollupContract(publicClient, currentRollupAddress.toString()); - - try { - l1GenesisRoot = await currentRollup.getGenesisArchiveTreeRoot(); - } catch { - userLog(`Failed to fetch genesis root from rollup at ${currentRollupAddress}. Retrying...`); - continue; - } - - if (l1GenesisRoot.equals(expectedGenesisRoot)) { - userLog(`Compatible rollup found at ${currentRollupAddress}. Exiting standby mode.`); - return; - } - - userLog(`Still waiting. Rollup at ${currentRollupAddress} has genesis root ${l1GenesisRoot}.`); - } - } finally { - await new Promise((resolve, reject) => standbyServer.close(err => (err ? reject(err) : resolve()))); - } -} - export async function startNode( options: any, signalHandlers: (() => Promise)[], @@ -154,16 +80,8 @@ export async function startNode( await preloadCrsDataForVerifying(nodeConfig, userLog); - const testAccounts = nodeConfig.testAccounts ? (await getInitialTestAccountsData()).map(a => a.address) : []; - const sponsoredFPCAccounts = nodeConfig.sponsoredFPC ? [await getSponsoredFPCAddress()] : []; - const prefundAddresses = (nodeConfig.prefundAddresses ?? 
[]).map(a => AztecAddress.fromString(a)); - const initialFundedAccounts = testAccounts.concat(sponsoredFPCAccounts).concat(prefundAddresses); - - userLog(`Initial funded accounts: ${initialFundedAccounts.map(a => a.toString()).join(', ')}`); - - const { genesisArchiveRoot, prefilledPublicData } = await getGenesisValues(initialFundedAccounts); - - userLog(`Genesis archive root: ${genesisArchiveRoot.toString()}`); + const genesisConfig = getGenesisStateConfigEnvVars(); + const { genesisArchiveRoot, prefilledPublicData } = await computeExpectedGenesisRoot(genesisConfig, userLog); const followsCanonicalRollup = typeof nodeConfig.rollupVersion !== 'number' || (nodeConfig.rollupVersion as unknown as string) === 'canonical'; @@ -174,16 +92,7 @@ export async function startNode( // Wait for a compatible rollup before proceeding with full L1 config fetch. // This prevents crashes when the canonical rollup hasn't been upgraded yet. - const publicClient = getPublicClient(nodeConfig); - const rollupVersion: number | 'canonical' = nodeConfig.rollupVersion ?? 
'canonical'; - await waitForCompatibleRollup( - publicClient, - nodeConfig.l1Contracts.registryAddress, - rollupVersion, - genesisArchiveRoot, - options.port, - userLog, - ); + await waitForCompatibleRollup(nodeConfig, genesisArchiveRoot, options.port, userLog); const { addresses, config } = await getL1Config( nodeConfig.l1Contracts.registryAddress, diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts b/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts index 75c320265f5b..3ff5bad42808 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts @@ -1,4 +1,5 @@ import { getL1Config } from '@aztec/cli/config'; +import { getGenesisStateConfigEnvVars } from '@aztec/ethereum/config'; import type { NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; import type { LogFn } from '@aztec/foundation/log'; import { @@ -13,6 +14,7 @@ import type { ProvingJobBroker } from '@aztec/stdlib/interfaces/server'; import { getConfigEnvVars as getTelemetryClientConfig, initTelemetryClient } from '@aztec/telemetry-client'; import { extractRelevantOptions } from '../util.js'; +import { computeExpectedGenesisRoot, waitForCompatibleRollup } from './standby.js'; export async function startProverBroker( options: any, @@ -34,6 +36,10 @@ export async function startProverBroker( throw new Error('L1 registry address is required to start Aztec Node without --deploy-aztec-contracts option'); } + const genesisConfig = getGenesisStateConfigEnvVars(); + const { genesisArchiveRoot } = await computeExpectedGenesisRoot(genesisConfig, userLog); + await waitForCompatibleRollup(config, genesisArchiveRoot, options.port, userLog); + const { addresses, config: rollupConfig } = await getL1Config( config.l1Contracts.registryAddress, config.l1RpcUrls, From c2758d8454fb6c03cc7fb88345df219774f27d0b Mon Sep 17 00:00:00 2001 From: danielntmd Date: Tue, 3 Mar 2026 21:06:47 +0000 Subject: [PATCH 17/37] refactor(p2p): 
maintain sorted array in tx pool instead of sorting on read - Replace Map> priority index with a sorted array, eliminating O(n log n) re-sorting on every read of pending transactions. - Cache the txHash string version to remove type conversions. --- .../src/mem_pools/tx_pool_v2/tx_metadata.ts | 8 +- .../tx_pool_v2/tx_pool_indices.test.ts | 184 ++++++++++++++++++ .../mem_pools/tx_pool_v2/tx_pool_indices.ts | 72 +++---- 3 files changed, 217 insertions(+), 47 deletions(-) create mode 100644 yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_indices.test.ts diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts index 3874a7aab292..1d2f434fb2fb 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_metadata.ts @@ -158,13 +158,13 @@ export function txHashFromBigInt(value: bigint): string { } /** Minimal fields required for priority comparison. */ -type PriorityComparable = Pick; +export type PriorityComparable = Pick; /** * Compares two priority fees in ascending order. * Returns negative if a < b, positive if a > b, 0 if equal. */ -export function compareFee(a: bigint, b: bigint): number { +export function compareFee(a: bigint, b: bigint): -1 | 0 | 1 { return a < b ? -1 : a > b ? 1 : 0; } @@ -173,7 +173,7 @@ export function compareFee(a: bigint, b: bigint): number { * Uses field element comparison for deterministic ordering. * Returns negative if a < b, positive if a > b, 0 if equal. */ -export function compareTxHash(a: bigint, b: bigint): number { +export function compareTxHash(a: bigint, b: bigint): -1 | 0 | 1 { return Fr.cmpAsBigInt(a, b); } @@ -182,7 +182,7 @@ export function compareTxHash(a: bigint, b: bigint): number { * Returns negative if a < b, positive if a > b, 0 if equal. * Use with sort() for ascending order, or negate/reverse for descending. 
*/ -export function comparePriority(a: PriorityComparable, b: PriorityComparable): number { +export function comparePriority(a: PriorityComparable, b: PriorityComparable): -1 | 0 | 1 { const feeComparison = compareFee(a.priorityFee, b.priorityFee); if (feeComparison !== 0) { return feeComparison; diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_indices.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_indices.test.ts new file mode 100644 index 000000000000..313563becba0 --- /dev/null +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_indices.test.ts @@ -0,0 +1,184 @@ +import { Fr } from '@aztec/foundation/curves/bn254'; + +import { stubTxMetaData, txHashFromBigInt } from './tx_metadata.js'; +import { TxPoolIndices } from './tx_pool_indices.js'; + +describe('TxPoolIndices', () => { + let indices: TxPoolIndices; + + const makeMeta = (seed: number, priorityFee: bigint) => + stubTxMetaData(new Fr(seed).toString(), { priorityFee, nullifiers: [`nullifier-${seed}`] }); + + beforeEach(() => { + indices = new TxPoolIndices(); + }); + + describe('sorted pending order', () => { + it('iterates descending by fee then hash', () => { + const low = makeMeta(1, 10n); + const mid = makeMeta(2, 50n); + const high = makeMeta(3, 100n); + + indices.addPending(low); + indices.addPending(high); + indices.addPending(mid); + + const desc = [...indices.iteratePendingByPriority('desc')]; + expect(desc).toEqual([high.txHash, mid.txHash, low.txHash]); + }); + + it('iterates ascending by fee then hash', () => { + const low = makeMeta(1, 10n); + const mid = makeMeta(2, 50n); + const high = makeMeta(3, 100n); + + indices.addPending(high); + indices.addPending(low); + indices.addPending(mid); + + const asc = [...indices.iteratePendingByPriority('asc')]; + expect(asc).toEqual([low.txHash, mid.txHash, high.txHash]); + }); + + it('uses txHash as tiebreaker for equal fees', () => { + const a = makeMeta(10, 50n); + const b = makeMeta(20, 50n); + const c = makeMeta(30, 50n); 
+ + indices.addPending(c); + indices.addPending(a); + indices.addPending(b); + + const asc = [...indices.iteratePendingByPriority('asc')]; + expect(asc).toHaveLength(3); + + const hashes = [a, b, c].map(m => m.txHashBigInt); + hashes.sort((x, y) => (x < y ? -1 : x > y ? 1 : 0)); + const expectedAsc = hashes.map(h => txHashFromBigInt(h)); + expect(asc).toEqual(expectedAsc); + }); + }); + + describe('remove', () => { + it('maintains order after removal', () => { + const a = makeMeta(1, 10n); + const b = makeMeta(2, 50n); + const c = makeMeta(3, 100n); + + indices.addPending(a); + indices.addPending(b); + indices.addPending(c); + + indices.remove(b.txHash); + + const desc = [...indices.iteratePendingByPriority('desc')]; + expect(desc).toEqual([c.txHash, a.txHash]); + }); + + it('handles removing non-existent tx gracefully', () => { + const a = makeMeta(1, 10n); + indices.addPending(a); + + indices.remove('0xdeadbeef'); + expect(indices.getPendingTxCount()).toBe(1); + }); + }); + + describe('count', () => { + it('returns correct count after adds and removes', () => { + expect(indices.getPendingTxCount()).toBe(0); + + const a = makeMeta(1, 10n); + const b = makeMeta(2, 20n); + indices.addPending(a); + indices.addPending(b); + expect(indices.getPendingTxCount()).toBe(2); + + indices.remove(a.txHash); + expect(indices.getPendingTxCount()).toBe(1); + + indices.remove(b.txHash); + expect(indices.getPendingTxCount()).toBe(0); + }); + }); + + describe('getLowestPriorityPendingTx', () => { + it('returns the lowest priority tx', () => { + const low = makeMeta(1, 5n); + const high = makeMeta(2, 100n); + + indices.addPending(high); + indices.addPending(low); + + expect(indices.getLowestPriorityPendingTx()?.txHash).toBe(low.txHash); + }); + + it('returns undefined for empty pool', () => { + expect(indices.getLowestPriorityPendingTx()).toBeUndefined(); + }); + }); + + describe('filter', () => { + it('applies filter during iteration', () => { + const a = makeMeta(1, 10n); + const b 
= makeMeta(2, 50n); + const c = makeMeta(3, 100n); + + indices.addPending(a); + indices.addPending(b); + indices.addPending(c); + + const filtered = [...indices.iteratePendingByPriority('desc', hash => hash !== b.txHash)]; + expect(filtered).toEqual([c.txHash, a.txHash]); + }); + }); + + describe('eligible pending', () => { + it('filters by receivedAt', () => { + const old = makeMeta(1, 10n); + old.receivedAt = 100; + const recent = makeMeta(2, 50n); + recent.receivedAt = 500; + + indices.addPending(old); + indices.addPending(recent); + + const eligible = [...indices.iterateEligiblePendingByPriority('desc', 200)]; + expect(eligible).toEqual([old.txHash]); + }); + }); + + describe('edge cases', () => { + it('iterates empty pool without error', () => { + expect([...indices.iteratePendingByPriority('desc')]).toEqual([]); + expect([...indices.iteratePendingByPriority('asc')]).toEqual([]); + }); + + it('handles single element', () => { + const a = makeMeta(1, 10n); + indices.addPending(a); + + expect([...indices.iteratePendingByPriority('desc')]).toEqual([a.txHash]); + expect([...indices.iteratePendingByPriority('asc')]).toEqual([a.txHash]); + }); + + it('does not add duplicates', () => { + const a = makeMeta(1, 10n); + indices.addPending(a); + indices.addPending(a); + + expect(indices.getPendingTxCount()).toBe(1); + }); + + it('add-remove-add cycle works', () => { + const a = makeMeta(1, 10n); + indices.addPending(a); + indices.remove(a.txHash); + expect(indices.getPendingTxCount()).toBe(0); + + indices.addPending(a); + expect(indices.getPendingTxCount()).toBe(1); + expect([...indices.iteratePendingByPriority('desc')]).toEqual([a.txHash]); + }); + }); +}); diff --git a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_indices.ts b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_indices.ts index 42dd87db5cbf..f39a4c4e5a09 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_indices.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool_v2/tx_pool_indices.ts @@ -1,7 
+1,8 @@ +import { insertIntoSortedArray, removeFromSortedArray } from '@aztec/foundation/array'; import { SlotNumber } from '@aztec/foundation/branded-types'; import type { L2BlockId } from '@aztec/stdlib/block'; -import { type TxMetaData, type TxState, compareFee, compareTxHash, txHashFromBigInt } from './tx_metadata.js'; +import { type PriorityComparable, type TxMetaData, type TxState, comparePriority } from './tx_metadata.js'; /** * Manages in-memory indices for the transaction pool. @@ -22,8 +23,8 @@ export class TxPoolIndices { #nullifierToTxHash: Map = new Map(); /** Fee payer to txHashes index (pending txs only) */ #feePayerToTxHashes: Map> = new Map(); - /** Pending txHash bigints grouped by priority fee */ - #pendingByPriority: Map> = new Map(); + /** Pending transactions sorted ascending by priority fee, ties broken by txHash */ + #pendingByPriority: PriorityComparable[] = []; /** Protected transactions: txHash -> slotNumber */ #protectedTransactions: Map = new Map(); @@ -73,20 +74,14 @@ export class TxPoolIndices { * @param order - 'desc' for highest priority first, 'asc' for lowest priority first */ *iteratePendingByPriority(order: 'asc' | 'desc', filter?: (hash: string) => boolean): Generator { - const feeCompareFn = order === 'desc' ? (a: bigint, b: bigint) => compareFee(b, a) : compareFee; - const hashCompareFn = - order === 'desc' ? (a: bigint, b: bigint) => compareTxHash(b, a) : (a: bigint, b: bigint) => compareTxHash(a, b); - - const sortedFees = [...this.#pendingByPriority.keys()].sort(feeCompareFn); - - for (const fee of sortedFees) { - const hashesAtFee = this.#pendingByPriority.get(fee)!; - const sortedHashes = [...hashesAtFee].sort(hashCompareFn); - for (const hashBigInt of sortedHashes) { - const hash = txHashFromBigInt(hashBigInt); - if (filter === undefined || filter(hash)) { - yield hash; - } + const arr = this.#pendingByPriority; + const start = order === 'asc' ? 0 : arr.length - 1; + const step = order === 'asc' ? 
1 : -1; + const inBounds = order === 'asc' ? (i: number) => i < arr.length : (i: number) => i >= 0; + + for (let i = start; inBounds(i); i += step) { + if (filter === undefined || filter(arr[i].txHash)) { + yield arr[i].txHash; } } } @@ -227,11 +222,7 @@ export class TxPoolIndices { /** Gets the count of pending transactions */ getPendingTxCount(): number { - let count = 0; - for (const hashes of this.#pendingByPriority.values()) { - count += hashes.size; - } - return count; + return this.#pendingByPriority.length; } /** Gets the lowest priority pending transaction hashes (up to limit) */ @@ -264,12 +255,10 @@ export class TxPoolIndices { /** Gets all pending transactions */ getPendingTxs(): TxMetaData[] { const result: TxMetaData[] = []; - for (const hashSet of this.#pendingByPriority.values()) { - for (const txHashBigInt of hashSet) { - const meta = this.#metadata.get(txHashFromBigInt(txHashBigInt)); - if (meta) { - result.push(meta); - } + for (const entry of this.#pendingByPriority) { + const meta = this.#metadata.get(entry.txHash); + if (meta) { + result.push(meta); } } return result; @@ -408,13 +397,12 @@ export class TxPoolIndices { } feePayerSet.add(meta.txHash); - // Add to priority bucket - let prioritySet = this.#pendingByPriority.get(meta.priorityFee); - if (!prioritySet) { - prioritySet = new Set(); - this.#pendingByPriority.set(meta.priorityFee, prioritySet); - } - prioritySet.add(meta.txHashBigInt); + insertIntoSortedArray( + this.#pendingByPriority, + { txHash: meta.txHash, priorityFee: meta.priorityFee, txHashBigInt: meta.txHashBigInt }, + comparePriority, + false, + ); } #removeFromPendingIndices(meta: TxMetaData): void { @@ -432,13 +420,11 @@ export class TxPoolIndices { } } - // Remove from priority map - const hashSet = this.#pendingByPriority.get(meta.priorityFee); - if (hashSet) { - hashSet.delete(meta.txHashBigInt); - if (hashSet.size === 0) { - this.#pendingByPriority.delete(meta.priorityFee); - } - } + // Remove from priority array + 
removeFromSortedArray( + this.#pendingByPriority, + { txHash: meta.txHash, priorityFee: meta.priorityFee, txHashBigInt: meta.txHashBigInt }, + comparePriority, + ); + } } From 9be06105edf71db4778fc24893c9c74f709f9da9 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Wed, 4 Mar 2026 11:22:03 -0300 Subject: [PATCH 18/37] fix(p2p): remove default block handler in favor of block handler (#21105) The default block handler was limited in that it failed to handle block proposals out of order, missed validations, missed proper timeouts, etc. It's now removed in favor of the full block proposal handler, which is installed if the validator client is not set. --- .../aztec-node/src/aztec-node/server.ts | 32 ++++---- yarn-project/p2p/src/client/p2p_client.ts | 22 ------ .../src/block_proposal_handler.ts | 78 +++++++++++-------- .../validator-client/src/validator.test.ts | 2 + 4 files changed, 64 insertions(+), 70 deletions(-) diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 6d1a4ce06bee..2569ae200853 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -381,22 +381,24 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { await validatorClient.registerHandlers(); } } + } - // If there's no validator client but alwaysReexecuteBlockProposals is enabled, - // create a BlockProposalHandler to reexecute block proposals for monitoring - if (!validatorClient && config.alwaysReexecuteBlockProposals) { - log.info('Setting up block proposal reexecution for monitoring'); - createBlockProposalHandler(config, { - checkpointsBuilder: validatorCheckpointsBuilder, - worldState: worldStateSynchronizer, - epochCache, - blockSource: archiver, - l1ToL2MessageSource: archiver, - p2pClient, - dateProvider, - telemetry, - }).registerForReexecution(p2pClient); - } + // If there's no validator client, create a BlockProposalHandler to
handle block proposals + // for monitoring or reexecution. Reexecution (default) allows us to follow the pending chain, + // while non-reexecution is used for validating the proposals and collecting their txs. + if (!validatorClient) { + const reexecute = !!config.alwaysReexecuteBlockProposals; + log.info(`Setting up block proposal handler` + (reexecute ? ' with reexecution of proposals' : '')); + createBlockProposalHandler(config, { + checkpointsBuilder: validatorCheckpointsBuilder, + worldState: worldStateSynchronizer, + epochCache, + blockSource: archiver, + l1ToL2MessageSource: archiver, + p2pClient, + dateProvider, + telemetry, + }).register(p2pClient, reexecute); } // Start world state and wait for it to sync to the archiver. diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index 7996594ff9cb..a6be4b34799a 100644 --- a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -18,7 +18,6 @@ import { type L2TipsStore, } from '@aztec/stdlib/block'; import type { ContractDataSource } from '@aztec/stdlib/contract'; -import { getTimestampForSlot } from '@aztec/stdlib/epoch-helpers'; import { type PeerInfo, tryStop } from '@aztec/stdlib/interfaces/server'; import { type BlockProposal, CheckpointAttestation, type CheckpointProposal, type TopicType } from '@aztec/stdlib/p2p'; import type { BlockHeader, Tx, TxHash } from '@aztec/stdlib/tx'; @@ -111,27 +110,6 @@ export class P2PClient extends WithTracer implements P2P { this.telemetry, ); - // Default to collecting all txs when we see a valid proposal - // This can be overridden by the validator client to validate, and it will call getTxsForBlockProposal on its own - // Note: Validators do NOT attest to individual blocks - attestations are only for checkpoint proposals. - // TODO(palla/txs): We should not trigger a request for txs on a proposal before fully validating it. 
We need to bring - // validator-client code into here so we can validate a proposal is reasonable. - this.registerBlockProposalHandler(async (block, sender) => { - this.log.debug(`Received block proposal from ${sender.toString()}`); - // TODO(palla/txs): Need to subtract validatorReexecuteDeadlineMs from this deadline (see ValidatorClient.getReexecutionDeadline) - const constants = this.txCollection.getConstants(); - const nextSlotTimestampSeconds = Number(getTimestampForSlot(SlotNumber(block.slotNumber + 1), constants)); - const deadline = new Date(nextSlotTimestampSeconds * 1000); - const parentBlock = await this.l2BlockSource.getBlockHeaderByArchive(block.blockHeader.lastArchive.root); - if (!parentBlock) { - this.log.debug(`Cannot collect txs for proposal as parent block not found`); - return false; - } - const blockNumber = BlockNumber(parentBlock.getBlockNumber() + 1); - await this.txProvider.getTxsForBlockProposal(block, blockNumber, { pinnedPeer: sender, deadline }); - return true; - }); - this.l2Tips = new L2TipsKVStore(store, 'p2p_client'); this.synchedLatestSlot = store.openSingleton('p2p_pool_last_l2_slot'); } diff --git a/yarn-project/validator-client/src/block_proposal_handler.ts b/yarn-project/validator-client/src/block_proposal_handler.ts index 0c8812aee9a8..3aac5ec77b25 100644 --- a/yarn-project/validator-client/src/block_proposal_handler.ts +++ b/yarn-project/validator-client/src/block_proposal_handler.ts @@ -1,6 +1,7 @@ import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; import type { EpochCache } from '@aztec/epoch-cache'; import { BlockNumber, CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; +import { pick } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { TimeoutError } from '@aztec/foundation/error'; import { createLogger } from '@aztec/foundation/log'; @@ -87,25 +88,28 @@ export class BlockProposalHandler { this.tracer = telemetry.getTracer('BlockProposalHandler'); 
} - registerForReexecution(p2pClient: P2P): BlockProposalHandler { - // Non-validator handler that re-executes for monitoring but does not attest. + register(p2pClient: P2P, shouldReexecute: boolean): BlockProposalHandler { + // Non-validator handler that processes or re-executes for monitoring but does not attest. // Returns boolean indicating whether the proposal was valid. const handler = async (proposal: BlockProposal, proposalSender: PeerId): Promise => { try { - const result = await this.handleBlockProposal(proposal, proposalSender, true); + const { slotNumber, blockNumber } = proposal; + const result = await this.handleBlockProposal(proposal, proposalSender, shouldReexecute); if (result.isValid) { - this.log.info(`Non-validator reexecution completed for slot ${proposal.slotNumber}`, { + this.log.info(`Non-validator block proposal ${blockNumber} at slot ${slotNumber} handled`, { blockNumber: result.blockNumber, + slotNumber, reexecutionTimeMs: result.reexecutionResult?.reexecutionTimeMs, totalManaUsed: result.reexecutionResult?.totalManaUsed, numTxs: result.reexecutionResult?.block?.body?.txEffects?.length ?? 0, + reexecuted: shouldReexecute, }); return true; } else { - this.log.warn(`Non-validator reexecution failed for slot ${proposal.slotNumber}`, { - blockNumber: result.blockNumber, - reason: result.reason, - }); + this.log.warn( + `Non-validator block proposal ${blockNumber} at slot ${slotNumber} failed processing with ${result.reason}`, + { blockNumber: result.blockNumber, slotNumber, reason: result.reason }, + ); return false; } } catch (error) { @@ -184,6 +188,15 @@ export class BlockProposalHandler { deadline: this.getReexecutionDeadline(slotNumber, config), }); + // If reexecution is disabled, bail. We are just interested in triggering tx collection. 
+ if (!shouldReexecute) { + this.log.info( + `Received valid block ${blockNumber} proposal at index ${proposal.indexWithinCheckpoint} on slot ${slotNumber}`, + proposalInfo, + ); + return { isValid: true, blockNumber }; + } + // Compute the checkpoint number for this block and validate checkpoint consistency const checkpointResult = this.computeCheckpointNumber(proposal, parentBlock, proposalInfo); if (checkpointResult.reason) { @@ -210,30 +223,28 @@ export class BlockProposalHandler { return { isValid: false, blockNumber, reason: 'txs_not_available' }; } + // Collect the out hashes of all the checkpoints before this one in the same epoch + const epoch = getEpochAtSlot(slotNumber, this.epochCache.getL1Constants()); + const previousCheckpointOutHashes = (await this.blockSource.getCheckpointsDataForEpoch(epoch)) + .filter(c => c.checkpointNumber < checkpointNumber) + .map(c => c.checkpointOutHash); + // Try re-executing the transactions in the proposal if needed let reexecutionResult; - if (shouldReexecute) { - // Collect the out hashes of all the checkpoints before this one in the same epoch - const epoch = getEpochAtSlot(slotNumber, this.epochCache.getL1Constants()); - const previousCheckpointOutHashes = (await this.blockSource.getCheckpointsDataForEpoch(epoch)) - .filter(c => c.checkpointNumber < checkpointNumber) - .map(c => c.checkpointOutHash); - - try { - this.log.verbose(`Re-executing transactions in the proposal`, proposalInfo); - reexecutionResult = await this.reexecuteTransactions( - proposal, - blockNumber, - checkpointNumber, - txs, - l1ToL2Messages, - previousCheckpointOutHashes, - ); - } catch (error) { - this.log.error(`Error reexecuting txs while processing block proposal`, error, proposalInfo); - const reason = this.getReexecuteFailureReason(error); - return { isValid: false, blockNumber, reason, reexecutionResult }; - } + try { + this.log.verbose(`Re-executing transactions in the proposal`, proposalInfo); + reexecutionResult = await 
this.reexecuteTransactions( + proposal, + blockNumber, + checkpointNumber, + txs, + l1ToL2Messages, + previousCheckpointOutHashes, + ); + } catch (error) { + this.log.error(`Error reexecuting txs while processing block proposal`, error, proposalInfo); + const reason = this.getReexecuteFailureReason(error); + return { isValid: false, blockNumber, reason, reexecutionResult }; } // If we succeeded, push this block into the archiver (unless disabled) @@ -242,8 +253,8 @@ export class BlockProposalHandler { } this.log.info( - `Successfully processed block ${blockNumber} proposal at index ${proposal.indexWithinCheckpoint} on slot ${slotNumber}`, - proposalInfo, + `Successfully re-executed block ${blockNumber} proposal at index ${proposal.indexWithinCheckpoint} on slot ${slotNumber}`, + { ...proposalInfo, ...pick(reexecutionResult, 'reexecutionTimeMs', 'totalManaUsed') }, ); return { isValid: true, blockNumber, reexecutionResult }; @@ -488,10 +499,11 @@ export class BlockProposalHandler { const { block, failedTxs } = result; const numFailedTxs = failedTxs.length; - this.log.verbose(`Transaction re-execution complete for slot ${slot}`, { + this.log.verbose(`Block proposal ${blockNumber} at slot ${slot} transaction re-execution complete`, { numFailedTxs, numProposalTxs: txHashes.length, numProcessedTxs: block.body.txEffects.length, + blockNumber, slot, }); diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index 1477a94b01b2..14799f855c4b 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -616,6 +616,7 @@ describe('ValidatorClient', () => { }); it('should return false if the transactions are not available', async () => { + enableReexecution(); txProvider.getTxsForBlockProposal.mockImplementation(proposal => Promise.resolve({ txs: [], @@ -692,6 +693,7 @@ describe('ValidatorClient', () => { // L1 messages for the checkpoint) will catch 
it. it('should return false if global variables do not match parent for non-first block in checkpoint', async () => { + enableReexecution(); // Create a proposal with indexWithinCheckpoint > 0 (non-first block in checkpoint) const parentSlotNumber = 100; const parentBlockNumber = 10; From 84a36e90755055402e6c3291d2d597da2aede6ab Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Wed, 4 Mar 2026 11:23:53 -0300 Subject: [PATCH 19/37] feat(validator): add VALIDATOR_ env vars for independent block limits (#21060) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary - Adds `VALIDATOR_MAX_L2_BLOCK_GAS`, `VALIDATOR_MAX_DA_BLOCK_GAS`, `VALIDATOR_MAX_TX_PER_BLOCK`, and `VALIDATOR_MAX_TX_PER_CHECKPOINT` env vars so operators can tune validation limits independently from `SEQ_` proposer limits - When a `VALIDATOR_` var is not set, no per-block limit is enforced for that dimension (checkpoint-level protocol limits still apply) - P2P gossip validation uses `VALIDATOR_MAX_TX_PER_BLOCK` when set, falling back to the sequencer's `maxTxsPerBlock` ## Test plan - [x] `yarn build` passes - [x] `yarn format` and `yarn lint` clean - [x] `yarn workspace @aztec/validator-client test` — 158 passed - [x] `yarn workspace @aztec/stdlib test src/checkpoint/validate.test.ts` — 20 passed 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-authored-by: Claude Opus 4.6 --- .../operators/reference/changelog/v4.md | 29 +++++++++++++++++-- .../aztec-node/src/aztec-node/server.ts | 11 +++++-- yarn-project/foundation/src/config/env_var.ts | 4 +++ yarn-project/p2p/src/config.ts | 9 ++++++ .../p2p/src/services/libp2p/libp2p_service.ts | 2 +- .../stdlib/src/interfaces/validator.ts | 19 ++++++++++-- yarn-project/validator-client/README.md | 6 +++- .../src/block_proposal_handler.ts | 7 +++++ yarn-project/validator-client/src/config.ts | 20 +++++++++++++ yarn-project/validator-client/src/factory.ts | 2 +- .../validator-client/src/validator.ts 
| 8 +++-- 11 files changed, 105 insertions(+), 12 deletions(-) diff --git a/docs/docs-operate/operators/reference/changelog/v4.md b/docs/docs-operate/operators/reference/changelog/v4.md index 774d43713996..599c58e8cfa6 100644 --- a/docs/docs-operate/operators/reference/changelog/v4.md +++ b/docs/docs-operate/operators/reference/changelog/v4.md @@ -87,13 +87,23 @@ The byte-based block size limit has been removed and replaced with field-based b --maxDABlockGas ($SEQ_MAX_DA_BLOCK_GAS) ``` -**New:** +**New (proposer):** ```bash --gasPerBlockAllocationMultiplier ($SEQ_GAS_PER_BLOCK_ALLOCATION_MULTIPLIER) +--maxTxsPerCheckpoint ($SEQ_MAX_TX_PER_CHECKPOINT) ``` -**Migration**: Remove `SEQ_MAX_BLOCK_SIZE_IN_BYTES` from your configuration. Per-block L2 and DA gas budgets are now derived automatically as `(checkpointLimit / maxBlocks) * multiplier`, where the multiplier defaults to 2. You can still override `SEQ_MAX_L2_BLOCK_GAS` and `SEQ_MAX_DA_BLOCK_GAS` explicitly, but they will be capped at the checkpoint-level limits. +**New (validator):** + +```bash +--validateMaxL2BlockGas ($VALIDATOR_MAX_L2_BLOCK_GAS) +--validateMaxDABlockGas ($VALIDATOR_MAX_DA_BLOCK_GAS) +--validateMaxTxsPerBlock ($VALIDATOR_MAX_TX_PER_BLOCK) +--validateMaxTxsPerCheckpoint ($VALIDATOR_MAX_TX_PER_CHECKPOINT) +``` + +**Migration**: Remove `SEQ_MAX_BLOCK_SIZE_IN_BYTES` from your configuration. Per-block L2 and DA gas budgets are now derived automatically as `(checkpointLimit / maxBlocks) * multiplier`, where the multiplier defaults to 2. You can still override `SEQ_MAX_L2_BLOCK_GAS` and `SEQ_MAX_DA_BLOCK_GAS` explicitly, but they will be capped at the checkpoint-level limits. Validators can now set independent per-block and per-checkpoint limits via the `VALIDATOR_` env vars; when not set, only checkpoint-level protocol limits are enforced. 
### Setup phase allow list requires function selectors @@ -201,6 +211,21 @@ P2P_RPC_PRICE_BUMP_PERCENTAGE=10 # default: 10 (percent) Set to `0` to disable the percentage-based bump (still requires strictly higher fee). +### Validator-specific block limits + +Validators can now enforce per-block and per-checkpoint limits independently from the sequencer (proposer) limits. This allows operators to accept proposals that exceed their own proposer settings, or to reject proposals that are too large even if the proposer's limits allow them. + +**Configuration:** + +```bash +VALIDATOR_MAX_L2_BLOCK_GAS= # Max L2 gas per block for validation +VALIDATOR_MAX_DA_BLOCK_GAS= # Max DA gas per block for validation +VALIDATOR_MAX_TX_PER_BLOCK= # Max txs per block for validation +VALIDATOR_MAX_TX_PER_CHECKPOINT= # Max txs per checkpoint for validation +``` + +When not set, no per-block limit is enforced for that dimension — only checkpoint-level protocol limits apply. These do not fall back to the `SEQ_` values. + ### Setup allow list extendable via network config The setup phase allow list can now be extended via the network configuration JSON (`txPublicSetupAllowListExtend` field). This allows network operators to distribute additional allowed setup functions to all nodes without requiring code changes. The local environment variable takes precedence over the network-json value. diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 2569ae200853..0f831bf4b767 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -346,9 +346,16 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { // We'll accumulate sentinel watchers here const watchers: Watcher[] = []; - // Create FullNodeCheckpointsBuilder for block proposal handling and tx validation + // Create FullNodeCheckpointsBuilder for block proposal handling and tx validation. 
+ // Override maxTxsPerCheckpoint with the validator-specific limit if set. const validatorCheckpointsBuilder = new FullNodeCheckpointsBuilder( - { ...config, l1GenesisTime, slotDuration: Number(slotDuration), rollupManaLimit }, + { + ...config, + l1GenesisTime, + slotDuration: Number(slotDuration), + rollupManaLimit, + maxTxsPerCheckpoint: config.validateMaxTxsPerCheckpoint, + }, worldStateSynchronizer, archiver, dateProvider, diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 0a8f28b97e84..07b60428cb17 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -279,6 +279,10 @@ export type EnvVar = | 'TRANSACTIONS_DISABLED' | 'VALIDATOR_ATTESTATIONS_POLLING_INTERVAL_MS' | 'VALIDATOR_DISABLED' + | 'VALIDATOR_MAX_DA_BLOCK_GAS' + | 'VALIDATOR_MAX_L2_BLOCK_GAS' + | 'VALIDATOR_MAX_TX_PER_BLOCK' + | 'VALIDATOR_MAX_TX_PER_CHECKPOINT' | 'VALIDATOR_PRIVATE_KEYS' | 'VALIDATOR_PRIVATE_KEY' | 'VALIDATOR_REEXECUTE' diff --git a/yarn-project/p2p/src/config.ts b/yarn-project/p2p/src/config.ts index 5189cab7adb9..fc27123df79c 100644 --- a/yarn-project/p2p/src/config.ts +++ b/yarn-project/p2p/src/config.ts @@ -40,6 +40,9 @@ export interface P2PConfig TxCollectionConfig, TxFileStoreConfig, Pick { + /** Maximum transactions per block for validation. Overrides maxTxsPerBlock for gossip validation when set. */ + validateMaxTxsPerBlock?: number; + /** A flag dictating whether the P2P subsystem should be enabled. */ p2pEnabled: boolean; @@ -199,6 +202,12 @@ export interface P2PConfig export const DEFAULT_P2P_PORT = 40400; export const p2pConfigMappings: ConfigMappingsType = { + validateMaxTxsPerBlock: { + env: 'VALIDATOR_MAX_TX_PER_BLOCK', + description: + 'Maximum transactions per block for validation. Overrides maxTxsPerBlock for gossip validation when set.', + parseEnv: (val: string) => (val ? 
parseInt(val, 10) : undefined), + }, p2pEnabled: { env: 'P2P_ENABLED', description: 'A flag dictating whether the P2P subsystem should be enabled.', diff --git a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts index de3c5c9f8fc8..2a81e7a1350d 100644 --- a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts +++ b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts @@ -224,7 +224,7 @@ export class LibP2PService extends WithTracer implements P2PService { const proposalValidatorOpts = { txsPermitted: !config.disableTransactions, - maxTxsPerBlock: config.maxTxsPerBlock, + maxTxsPerBlock: config.validateMaxTxsPerBlock, }; this.blockProposalValidator = new BlockProposalValidator(epochCache, proposalValidatorOpts); this.checkpointProposalValidator = new CheckpointProposalValidator(epochCache, proposalValidatorOpts); diff --git a/yarn-project/stdlib/src/interfaces/validator.ts b/yarn-project/stdlib/src/interfaces/validator.ts index c6596a6aca72..f0b1c05e2209 100644 --- a/yarn-project/stdlib/src/interfaces/validator.ts +++ b/yarn-project/stdlib/src/interfaces/validator.ts @@ -59,10 +59,22 @@ export type ValidatorClientConfig = ValidatorHASignerConfig & { /** Agree to attest to equivocated checkpoint proposals (for testing purposes only) */ attestToEquivocatedProposals?: boolean; + + /** Maximum L2 gas per block for validation. Proposals exceeding this limit are rejected. */ + validateMaxL2BlockGas?: number; + + /** Maximum DA gas per block for validation. Proposals exceeding this limit are rejected. */ + validateMaxDABlockGas?: number; + + /** Maximum transactions per block for validation. Proposals exceeding this limit are rejected. */ + validateMaxTxsPerBlock?: number; + + /** Maximum transactions per checkpoint for validation. Proposals exceeding this limit are rejected. 
*/ + validateMaxTxsPerCheckpoint?: number; }; export type ValidatorClientFullConfig = ValidatorClientConfig & - Pick & + Pick & Pick< SlasherConfig, 'slashBroadcastedInvalidBlockPenalty' | 'slashDuplicateProposalPenalty' | 'slashDuplicateAttestationPenalty' @@ -86,6 +98,10 @@ export const ValidatorClientConfigSchema = zodFor (val ? parseInt(val, 10) : undefined), + }, + validateMaxDABlockGas: { + env: 'VALIDATOR_MAX_DA_BLOCK_GAS', + description: 'Maximum DA block gas for validation. Proposals exceeding this limit are rejected.', + parseEnv: (val: string) => (val ? parseInt(val, 10) : undefined), + }, + validateMaxTxsPerBlock: { + env: 'VALIDATOR_MAX_TX_PER_BLOCK', + description: 'Maximum transactions per block for validation. Proposals exceeding this limit are rejected.', + parseEnv: (val: string) => (val ? parseInt(val, 10) : undefined), + }, + validateMaxTxsPerCheckpoint: { + env: 'VALIDATOR_MAX_TX_PER_CHECKPOINT', + description: 'Maximum transactions per checkpoint for validation. Proposals exceeding this limit are rejected.', + parseEnv: (val: string) => (val ? 
parseInt(val, 10) : undefined), + }, ...validatorHASignerConfigMappings, }; diff --git a/yarn-project/validator-client/src/factory.ts b/yarn-project/validator-client/src/factory.ts index eacdb5322965..b7645d48c485 100644 --- a/yarn-project/validator-client/src/factory.ts +++ b/yarn-project/validator-client/src/factory.ts @@ -29,7 +29,7 @@ export function createBlockProposalHandler( const metrics = new ValidatorMetrics(deps.telemetry); const blockProposalValidator = new BlockProposalValidator(deps.epochCache, { txsPermitted: !config.disableTransactions, - maxTxsPerBlock: config.maxTxsPerBlock, + maxTxsPerBlock: config.validateMaxTxsPerBlock, }); return new BlockProposalHandler( deps.checkpointsBuilder, diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 2fad5db0bca7..76f97ed0d86b 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -201,7 +201,7 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) const metrics = new ValidatorMetrics(telemetry); const blockProposalValidator = new BlockProposalValidator(epochCache, { txsPermitted: !config.disableTransactions, - maxTxsPerBlock: config.maxTxsPerBlock, + maxTxsPerBlock: config.validateMaxTxsPerBlock, }); const blockProposalHandler = new BlockProposalHandler( checkpointsBuilder, @@ -771,8 +771,10 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) try { validateCheckpoint(computedCheckpoint, { rollupManaLimit: this.checkpointsBuilder.getConfig().rollupManaLimit, - maxDABlockGas: undefined, - maxL2BlockGas: undefined, + maxDABlockGas: this.config.validateMaxDABlockGas, + maxL2BlockGas: this.config.validateMaxL2BlockGas, + maxTxsPerBlock: this.config.validateMaxTxsPerBlock, + maxTxsPerCheckpoint: this.config.validateMaxTxsPerCheckpoint, }); } catch (err) { this.log.warn(`Checkpoint validation failed: ${err}`, proposalInfo); From 
445b8a4db170e2db627003bad2e3ceb838a08bf3 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Wed, 4 Mar 2026 13:28:14 -0300 Subject: [PATCH 20/37] refactor(p2p): decouple proposal validators from base class via composition (#21075) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary - Remove pass-through getters (`blockNumber`, `txHashes`, `txs`) from `CheckpointProposal` that just delegated to `lastBlock` - Split `ProposalValidator` into header validation (`validate`) and tx validation (`validateTxs`) - `BlockProposalValidator` and `CheckpointProposalValidator` now use `ProposalValidator` by composition instead of inheritance - Consolidate test suite into a single `proposal_validator.test.ts` that tests `ProposalValidator` directly with both proposal types ## Test plan - [x] `yarn build` passes - [x] `proposal_validator.test.ts` passes (28 tests: header validation with both block and checkpoint proposals, plus tx validation) 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-authored-by: Claude Opus 4.6 --- .../block_proposal_validator.test.ts | 26 -- .../block_proposal_validator.ts | 16 +- .../checkpoint_proposal_validator.test.ts | 130 ------ .../checkpoint_proposal_validator.ts | 25 +- .../proposal_validator.test.ts | 237 +++++++++++ .../proposal_validator/proposal_validator.ts | 105 ++--- .../proposal_validator_test_suite.ts | 379 ------------------ .../stdlib/src/p2p/checkpoint_proposal.ts | 17 - .../src/duties/validation_service.ts | 10 +- .../validator-client/src/validator.ts | 2 - 10 files changed, 328 insertions(+), 619 deletions(-) delete mode 100644 yarn-project/p2p/src/msg_validators/proposal_validator/block_proposal_validator.test.ts delete mode 100644 yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts create mode 100644 yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator.test.ts delete mode 100644 
yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator_test_suite.ts diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/block_proposal_validator.test.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/block_proposal_validator.test.ts deleted file mode 100644 index 873d387afd70..000000000000 --- a/yarn-project/p2p/src/msg_validators/proposal_validator/block_proposal_validator.test.ts +++ /dev/null @@ -1,26 +0,0 @@ -import type { EpochCacheInterface } from '@aztec/epoch-cache'; -import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types'; -import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; -import { EthAddress } from '@aztec/foundation/eth-address'; -import { makeBlockHeader, makeBlockProposal } from '@aztec/stdlib/testing'; -import { TxHash } from '@aztec/stdlib/tx'; - -import { mock } from 'jest-mock-extended'; - -import { BlockProposalValidator } from './block_proposal_validator.js'; -import { sharedProposalValidatorTests } from './proposal_validator_test_suite.js'; - -describe('BlockProposalValidator', () => { - sharedProposalValidatorTests({ - validatorFactory: (epochCache, opts) => new BlockProposalValidator(epochCache, opts), - makeProposal: makeBlockProposal, - makeHeader: (epochNumber: number | bigint, slotNumber: number | bigint, blockNumber: number | bigint) => - makeBlockHeader(0, { blockNumber: BlockNumber(Number(blockNumber)), slotNumber: SlotNumber(Number(slotNumber)) }), - getSigner: () => Secp256k1Signer.random(), - getAddress: (signer?: Secp256k1Signer) => (signer ? 
signer.address : EthAddress.random()), - getSlot: (slot: number | bigint) => SlotNumber(Number(slot)), - getTxHashes: (n: number) => Array.from({ length: n }, () => TxHash.random()), - getTxs: () => [], - epochCacheMock: () => mock(), - }); -}); diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/block_proposal_validator.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/block_proposal_validator.ts index a481256e9f37..bac274f46475 100644 --- a/yarn-project/p2p/src/msg_validators/proposal_validator/block_proposal_validator.ts +++ b/yarn-project/p2p/src/msg_validators/proposal_validator/block_proposal_validator.ts @@ -1,10 +1,20 @@ import type { EpochCacheInterface } from '@aztec/epoch-cache'; -import type { BlockProposal, P2PValidator } from '@aztec/stdlib/p2p'; +import type { BlockProposal, P2PValidator, ValidationResult } from '@aztec/stdlib/p2p'; import { ProposalValidator } from '../proposal_validator/proposal_validator.js'; -export class BlockProposalValidator extends ProposalValidator implements P2PValidator { +export class BlockProposalValidator implements P2PValidator { + private proposalValidator: ProposalValidator; + constructor(epochCache: EpochCacheInterface, opts: { txsPermitted: boolean; maxTxsPerBlock?: number }) { - super(epochCache, opts, 'p2p:block_proposal_validator'); + this.proposalValidator = new ProposalValidator(epochCache, opts, 'p2p:block_proposal_validator'); + } + + async validate(proposal: BlockProposal): Promise { + const headerResult = await this.proposalValidator.validate(proposal); + if (headerResult.result !== 'accept') { + return headerResult; + } + return this.proposalValidator.validateTxs(proposal); } } diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts deleted file mode 100644 index ea093cd2ab2e..000000000000 --- 
a/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts +++ /dev/null @@ -1,130 +0,0 @@ -import type { EpochCacheInterface } from '@aztec/epoch-cache'; -import { SlotNumber } from '@aztec/foundation/branded-types'; -import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; -import { EthAddress } from '@aztec/foundation/eth-address'; -import { CheckpointHeader } from '@aztec/stdlib/rollup'; -import type { MakeCheckpointProposalOptions } from '@aztec/stdlib/testing'; -import { makeBlockHeader, makeCheckpointHeader, makeCheckpointProposal } from '@aztec/stdlib/testing'; -import { TxHash } from '@aztec/stdlib/tx'; - -import { mock } from 'jest-mock-extended'; - -import { CheckpointProposalValidator } from './checkpoint_proposal_validator.js'; -import { sharedProposalValidatorTests } from './proposal_validator_test_suite.js'; - -describe('CheckpointProposalValidator', () => { - /** - * Adapter function to convert shared test options to CheckpointProposal options. - * The shared test uses blockHeader/lastBlockHeader, but CheckpointProposal uses - * checkpointHeader and lastBlock.blockHeader. - */ - const makeCheckpointProposalAdapter = (options?: { - blockHeader?: CheckpointHeader; - lastBlockHeader?: CheckpointHeader; - signer?: Secp256k1Signer; - txHashes?: TxHash[]; - txs?: any[]; - }) => { - // Use the blockHeader directly as the checkpointHeader - const checkpointHeader = options?.blockHeader ?? makeCheckpointHeader(1); - - // Create a BlockHeader for the lastBlock using the slot from the checkpointHeader - const lastBlockBlockHeader = options?.lastBlockHeader - ? makeBlockHeader(0, { slotNumber: checkpointHeader.slotNumber }) - : undefined; - - const adaptedOptions: MakeCheckpointProposalOptions = { - signer: options?.signer, - checkpointHeader, - // Create lastBlock with a proper BlockHeader - lastBlock: lastBlockBlockHeader - ? 
{ - blockHeader: lastBlockBlockHeader, - txHashes: options?.txHashes, - txs: options?.txs, - } - : undefined, - }; - - return makeCheckpointProposal(adaptedOptions); - }; - - sharedProposalValidatorTests({ - validatorFactory: (epochCache, opts) => new CheckpointProposalValidator(epochCache, opts), - makeProposal: makeCheckpointProposalAdapter, - makeHeader: (_epochNumber: number | bigint, slotNumber: number | bigint, _blockNumber: number | bigint) => - makeCheckpointHeader(0, { slotNumber: SlotNumber(Number(slotNumber)) }), - getSigner: () => Secp256k1Signer.random(), - getAddress: (signer?: Secp256k1Signer) => (signer ? signer.address : EthAddress.random()), - getSlot: (slot: number | bigint) => SlotNumber(Number(slot)), - getTxHashes: (n: number) => Array.from({ length: n }, () => TxHash.random()), - getTxs: () => [], - epochCacheMock: () => mock(), - }); - - describe('maxTxsPerBlock validation', () => { - const currentSlot = SlotNumber(100); - const nextSlot = SlotNumber(101); - let epochCache: ReturnType>; - - function setupEpochCache(proposerAddress: EthAddress) { - epochCache = mock(); - epochCache.getCurrentAndNextSlot.mockReturnValue({ currentSlot, nextSlot }); - epochCache.getProposerAttesterAddressInSlot.mockResolvedValue(proposerAddress); - } - - it('rejects checkpoint proposal when last block txHashes exceed maxTxsPerBlock', async () => { - const signer = Secp256k1Signer.random(); - setupEpochCache(signer.address); - const validator = new CheckpointProposalValidator(epochCache, { txsPermitted: true, maxTxsPerBlock: 2 }); - - const header = makeCheckpointHeader(0, { slotNumber: currentSlot }); - const proposal = await makeCheckpointProposalAdapter({ - blockHeader: header, - lastBlockHeader: header, - signer, - txHashes: Array.from({ length: 3 }, () => TxHash.random()), - }); - - const result = await validator.validate(proposal); - expect(result).toEqual({ result: 'reject', severity: expect.anything() }); - }); - - it('accepts checkpoint proposal when 
last block txHashes are within maxTxsPerBlock', async () => { - const signer = Secp256k1Signer.random(); - setupEpochCache(signer.address); - const validator = new CheckpointProposalValidator(epochCache, { txsPermitted: true, maxTxsPerBlock: 5 }); - - const header = makeCheckpointHeader(0, { slotNumber: currentSlot }); - const proposal = await makeCheckpointProposalAdapter({ - blockHeader: header, - lastBlockHeader: header, - signer, - txHashes: Array.from({ length: 3 }, () => TxHash.random()), - }); - - const result = await validator.validate(proposal); - expect(result).toEqual({ result: 'accept' }); - }); - - it('skips maxTxsPerBlock check when not configured', async () => { - const signer = Secp256k1Signer.random(); - setupEpochCache(signer.address); - const validator = new CheckpointProposalValidator(epochCache, { - txsPermitted: true, - maxTxsPerBlock: undefined, - }); - - const header = makeCheckpointHeader(0, { slotNumber: currentSlot }); - const proposal = await makeCheckpointProposalAdapter({ - blockHeader: header, - lastBlockHeader: header, - signer, - txHashes: Array.from({ length: 100 }, () => TxHash.random()), - }); - - const result = await validator.validate(proposal); - expect(result).toEqual({ result: 'accept' }); - }); - }); -}); diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.ts index 74804fe45d21..11d94fe6a9d5 100644 --- a/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.ts +++ b/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.ts @@ -1,13 +1,26 @@ import type { EpochCacheInterface } from '@aztec/epoch-cache'; -import type { CheckpointProposal, P2PValidator } from '@aztec/stdlib/p2p'; +import type { CheckpointProposal, P2PValidator, ValidationResult } from '@aztec/stdlib/p2p'; import { ProposalValidator } from 
'../proposal_validator/proposal_validator.js'; -export class CheckpointProposalValidator - extends ProposalValidator - implements P2PValidator -{ +export class CheckpointProposalValidator implements P2PValidator { + private proposalValidator: ProposalValidator; + constructor(epochCache: EpochCacheInterface, opts: { txsPermitted: boolean; maxTxsPerBlock?: number }) { - super(epochCache, opts, 'p2p:checkpoint_proposal_validator'); + this.proposalValidator = new ProposalValidator(epochCache, opts, 'p2p:checkpoint_proposal_validator'); + } + + async validate(proposal: CheckpointProposal): Promise { + const headerResult = await this.proposalValidator.validate(proposal); + if (headerResult.result !== 'accept') { + return headerResult; + } + + const blockProposal = proposal.getBlockProposal(); + if (blockProposal) { + return this.proposalValidator.validateTxs(blockProposal); + } + + return { result: 'accept' }; } } diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator.test.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator.test.ts new file mode 100644 index 000000000000..8df14cd951f0 --- /dev/null +++ b/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator.test.ts @@ -0,0 +1,237 @@ +import type { EpochCacheInterface } from '@aztec/epoch-cache'; +import { NoCommitteeError } from '@aztec/ethereum/contracts'; +import { SlotNumber } from '@aztec/foundation/branded-types'; +import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { PeerErrorSeverity } from '@aztec/stdlib/p2p'; +import { + makeBlockHeader, + makeBlockProposal, + makeCheckpointHeader, + makeCheckpointProposal, +} from '@aztec/stdlib/testing'; +import { TxHash } from '@aztec/stdlib/tx'; + +import { jest } from '@jest/globals'; +import { type MockProxy, mock } from 'jest-mock-extended'; + +import { ProposalValidator } from 
'./proposal_validator.js'; + +describe('ProposalValidator', () => { + const currentSlot = SlotNumber(100); + const nextSlot = SlotNumber(101); + const previousSlot = SlotNumber(99); + let epochCache: MockProxy; + let validator: ProposalValidator; + + function mockGetProposer(currentProposer: EthAddress, nextProposer: EthAddress, previousProposer?: EthAddress) { + epochCache.getProposerAttesterAddressInSlot.mockImplementation(slot => { + if (slot === currentSlot) { + return Promise.resolve(currentProposer); + } + if (slot === nextSlot) { + return Promise.resolve(nextProposer); + } + if (slot === previousSlot && previousProposer) { + return Promise.resolve(previousProposer); + } + throw new Error('Unexpected argument'); + }); + } + + beforeEach(() => { + epochCache = mock(); + validator = new ProposalValidator(epochCache, { txsPermitted: true, maxTxsPerBlock: undefined }, 'test'); + epochCache.getCurrentAndNextSlot.mockReturnValue({ currentSlot, nextSlot }); + }); + + describe.each([ + { + name: 'block proposal', + factory: (slotNumber: SlotNumber, signer: Secp256k1Signer) => + makeBlockProposal({ blockHeader: makeBlockHeader(0, { slotNumber }), signer }), + }, + { + name: 'checkpoint proposal', + factory: (slotNumber: SlotNumber, signer: Secp256k1Signer) => + makeCheckpointProposal({ checkpointHeader: makeCheckpointHeader(0, { slotNumber }), signer }), + }, + ])('validate with $name', ({ factory }) => { + it('rejects with high tolerance error if slot is outside clock tolerance', async () => { + const proposal = await factory(previousSlot, Secp256k1Signer.random()); + + epochCache.getEpochAndSlotNow.mockReturnValue({ + epoch: 1 as any, + slot: currentSlot, + ts: 1000n, + nowMs: 1001000n, // 1000ms elapsed, outside 500ms tolerance + }); + + epochCache.getProposerAttesterAddressInSlot.mockResolvedValue(EthAddress.random()); + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.HighToleranceError 
}); + expect(epochCache.getProposerAttesterAddressInSlot).not.toHaveBeenCalled(); + }); + + it('ignores if previous slot proposal is within clock tolerance', async () => { + const signer = Secp256k1Signer.random(); + const proposal = await factory(previousSlot, signer); + + epochCache.getEpochAndSlotNow.mockReturnValue({ + epoch: 1 as any, + slot: currentSlot, + ts: 1000n, + nowMs: 1000100n, // 100ms elapsed, within 500ms tolerance + }); + + mockGetProposer(EthAddress.random(), EthAddress.random(), signer.address); + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'ignore' }); + }); + + it('rejects with mid tolerance error if signature is invalid', async () => { + const signer = Secp256k1Signer.random(); + const proposal = await factory(currentSlot, signer); + + jest.spyOn(proposal as any, 'getSender').mockReturnValue(undefined); + + mockGetProposer(signer.address, EthAddress.random()); + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.MidToleranceError }); + expect(epochCache.getProposerAttesterAddressInSlot).not.toHaveBeenCalled(); + }); + + it('rejects with mid tolerance error if proposer is wrong for current slot', async () => { + const proposal = await factory(currentSlot, Secp256k1Signer.random()); + + mockGetProposer(EthAddress.random(), EthAddress.random()); + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.MidToleranceError }); + }); + + it('rejects with mid tolerance error if proposer is wrong for next slot', async () => { + const proposal = await factory(nextSlot, Secp256k1Signer.random()); + + mockGetProposer(EthAddress.random(), EthAddress.random()); + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.MidToleranceError }); + }); + + it('rejects with mid tolerance error if current proposer 
sends for next slot', async () => { + const currentProposer = Secp256k1Signer.random(); + const proposal = await factory(nextSlot, currentProposer); + + mockGetProposer(currentProposer.address, EthAddress.random()); + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.MidToleranceError }); + }); + + it('accepts when proposer is undefined (open committee)', async () => { + const proposal = await factory(currentSlot, Secp256k1Signer.random()); + + epochCache.getProposerAttesterAddressInSlot.mockResolvedValue(undefined); + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'accept' }); + }); + + it('rejects with low tolerance error on NoCommitteeError', async () => { + const proposal = await factory(currentSlot, Secp256k1Signer.random()); + + epochCache.getProposerAttesterAddressInSlot.mockRejectedValue(new NoCommitteeError()); + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.LowToleranceError }); + }); + + it('accepts valid proposal for current slot', async () => { + const signer = Secp256k1Signer.random(); + const proposal = await factory(currentSlot, signer); + + mockGetProposer(signer.address, EthAddress.random()); + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'accept' }); + }); + + it('accepts valid proposal for next slot', async () => { + const signer = Secp256k1Signer.random(); + const proposal = await factory(nextSlot, signer); + + mockGetProposer(EthAddress.random(), signer.address); + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'accept' }); + }); + }); + + describe('validateTxs', () => { + describe('txsPermitted', () => { + it('rejects proposal with txHashes when txs not permitted', async () => { + validator = new ProposalValidator(epochCache, { txsPermitted: false, maxTxsPerBlock: undefined }, 
'test'); + + const proposal = await makeBlockProposal({ txHashes: [TxHash.random(), TxHash.random()] }); + const result = await validator.validateTxs(proposal); + expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.MidToleranceError }); + }); + + it('accepts proposal with no txHashes when txs not permitted', async () => { + validator = new ProposalValidator(epochCache, { txsPermitted: false, maxTxsPerBlock: undefined }, 'test'); + + const proposal = await makeBlockProposal({ txHashes: [] }); + const result = await validator.validateTxs(proposal); + expect(result).toEqual({ result: 'accept' }); + }); + + it('accepts proposal with txHashes when txs permitted', async () => { + const proposal = await makeBlockProposal({ txHashes: [TxHash.random(), TxHash.random()] }); + const result = await validator.validateTxs(proposal); + expect(result).toEqual({ result: 'accept' }); + }); + }); + + describe('embedded tx validation', () => { + it('rejects if embedded txs are not listed in txHashes', async () => { + const txHashes = [TxHash.random(), TxHash.random()]; + const proposal = await makeBlockProposal({ txHashes }); + + const fakeTx = { getTxHash: () => TxHash.random(), validateTxHash: () => Promise.resolve(true) }; + Object.defineProperty(proposal, 'txs', { get: () => [fakeTx], configurable: true }); + + const result = await validator.validateTxs(proposal); + expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.MidToleranceError }); + }); + + it('rejects if embedded tx has invalid tx hash', async () => { + const txHashes = [TxHash.random(), TxHash.random()]; + const proposal = await makeBlockProposal({ txHashes }); + + const fakeTx = { getTxHash: () => txHashes[0], validateTxHash: () => Promise.resolve(false) }; + Object.defineProperty(proposal, 'txs', { get: () => [fakeTx], configurable: true }); + + const result = await validator.validateTxs(proposal); + expect(result).toEqual({ result: 'reject', severity: 
PeerErrorSeverity.LowToleranceError }); + }); + }); + + describe('maxTxsPerBlock', () => { + it('rejects when txHashes exceed maxTxsPerBlock', async () => { + validator = new ProposalValidator(epochCache, { txsPermitted: true, maxTxsPerBlock: 2 }, 'test'); + + const proposal = await makeBlockProposal({ txHashes: Array.from({ length: 3 }, () => TxHash.random()) }); + const result = await validator.validateTxs(proposal); + expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.MidToleranceError }); + }); + + it('accepts when txHashes count equals maxTxsPerBlock', async () => { + validator = new ProposalValidator(epochCache, { txsPermitted: true, maxTxsPerBlock: 2 }, 'test'); + + const proposal = await makeBlockProposal({ txHashes: Array.from({ length: 2 }, () => TxHash.random()) }); + const result = await validator.validateTxs(proposal); + expect(result).toEqual({ result: 'accept' }); + }); + + it('accepts when maxTxsPerBlock is not set (unlimited)', async () => { + const proposal = await makeBlockProposal({ txHashes: Array.from({ length: 10 }, () => TxHash.random()) }); + const result = await validator.validateTxs(proposal); + expect(result).toEqual({ result: 'accept' }); + }); + }); + }); +}); diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator.ts index a926d1f3c144..45c38dd61529 100644 --- a/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator.ts +++ b/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator.ts @@ -1,15 +1,21 @@ import type { EpochCacheInterface } from '@aztec/epoch-cache'; import { NoCommitteeError } from '@aztec/ethereum/contracts'; import { type Logger, createLogger } from '@aztec/foundation/log'; -import { BlockProposal, CheckpointProposal, PeerErrorSeverity, type ValidationResult } from '@aztec/stdlib/p2p'; +import { + type BlockProposal, + type CheckpointProposalCore, + 
PeerErrorSeverity, + type ValidationResult, +} from '@aztec/stdlib/p2p'; import { isWithinClockTolerance } from '../clock_tolerance.js'; -export abstract class ProposalValidator { - protected epochCache: EpochCacheInterface; - protected logger: Logger; - protected txsPermitted: boolean; - protected maxTxsPerBlock?: number; +/** Validates header-level and tx-level fields of block and checkpoint proposals. */ +export class ProposalValidator { + private epochCache: EpochCacheInterface; + private logger: Logger; + private txsPermitted: boolean; + private maxTxsPerBlock?: number; constructor( epochCache: EpochCacheInterface, @@ -22,7 +28,8 @@ export abstract class ProposalValidator { + /** Validates header-level fields: slot, signature, and proposer. */ + public async validate(proposal: BlockProposal | CheckpointProposalCore): Promise { try { // Slot check const { currentSlot, nextSlot } = this.epochCache.getCurrentAndNextSlot(); @@ -44,38 +51,6 @@ export abstract class ProposalValidator 0 || embeddedTxCount > 0)) { - this.logger.warn( - `Penalizing peer for proposal with ${proposal.txHashes.length} transaction(s) when transactions are not permitted`, - ); - return { result: 'reject', severity: PeerErrorSeverity.MidToleranceError }; - } - - // Max txs per block check - if (this.maxTxsPerBlock !== undefined && proposal.txHashes.length > this.maxTxsPerBlock) { - this.logger.warn( - `Penalizing peer for proposal with ${proposal.txHashes.length} transaction(s) when max is ${this.maxTxsPerBlock}`, - ); - return { result: 'reject', severity: PeerErrorSeverity.MidToleranceError }; - } - - // Embedded txs must be listed in txHashes - const hashSet = new Set(proposal.txHashes.map(h => h.toString())); - const missingTxHashes = - embeddedTxCount > 0 - ? 
proposal.txs!.filter(tx => !hashSet.has(tx.getTxHash().toString())).map(tx => tx.getTxHash().toString()) - : []; - if (embeddedTxCount > 0 && missingTxHashes.length > 0) { - this.logger.warn('Penalizing peer for embedded transaction(s) not included in txHashes', { - embeddedTxCount, - txHashesLength: proposal.txHashes.length, - missingTxHashes, - }); - return { result: 'reject', severity: PeerErrorSeverity.MidToleranceError }; - } - // Proposer check const expectedProposer = await this.epochCache.getProposerAttesterAddressInSlot(slotNumber); if (expectedProposer !== undefined && !proposer.equals(expectedProposer)) { @@ -86,15 +61,6 @@ export abstract class ProposalValidator tx.validateTxHash()) ?? [])).every(v => v)) { - this.logger.warn(`Penalizing peer for invalid tx hashes in proposal`, { - proposer, - slotNumber, - }); - return { result: 'reject', severity: PeerErrorSeverity.LowToleranceError }; - } - return { result: 'accept' }; } catch (e) { if (e instanceof NoCommitteeError) { @@ -103,4 +69,47 @@ export abstract class ProposalValidator { + // Transactions permitted check + const embeddedTxCount = proposal.txs?.length ?? 0; + if (!this.txsPermitted && (proposal.txHashes.length > 0 || embeddedTxCount > 0)) { + this.logger.warn( + `Penalizing peer for proposal with ${proposal.txHashes.length} transaction(s) when transactions are not permitted`, + ); + return { result: 'reject', severity: PeerErrorSeverity.MidToleranceError }; + } + + // Max txs per block check + if (this.maxTxsPerBlock !== undefined && proposal.txHashes.length > this.maxTxsPerBlock) { + this.logger.warn( + `Penalizing peer for proposal with ${proposal.txHashes.length} transaction(s) when max is ${this.maxTxsPerBlock}`, + ); + return { result: 'reject', severity: PeerErrorSeverity.MidToleranceError }; + } + + // Embedded txs must be listed in txHashes + const hashSet = new Set(proposal.txHashes.map(h => h.toString())); + const missingTxHashes = + embeddedTxCount > 0 + ? 
proposal.txs!.filter(tx => !hashSet.has(tx.getTxHash().toString())).map(tx => tx.getTxHash().toString()) + : []; + if (embeddedTxCount > 0 && missingTxHashes.length > 0) { + this.logger.warn('Penalizing peer for embedded transaction(s) not included in txHashes', { + embeddedTxCount, + txHashesLength: proposal.txHashes.length, + missingTxHashes, + }); + return { result: 'reject', severity: PeerErrorSeverity.MidToleranceError }; + } + + // Validate tx hashes for all txs embedded in the proposal + if (!(await Promise.all(proposal.txs?.map(tx => tx.validateTxHash()) ?? [])).every(v => v)) { + this.logger.warn(`Penalizing peer for invalid tx hashes in proposal`); + return { result: 'reject', severity: PeerErrorSeverity.LowToleranceError }; + } + + return { result: 'accept' }; + } } diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator_test_suite.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator_test_suite.ts deleted file mode 100644 index ec12ec3442f6..000000000000 --- a/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator_test_suite.ts +++ /dev/null @@ -1,379 +0,0 @@ -import type { EpochCacheInterface } from '@aztec/epoch-cache'; -import { NoCommitteeError } from '@aztec/ethereum/contracts'; -import type { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; -import type { EthAddress } from '@aztec/foundation/eth-address'; -import { - type BlockProposal, - type CheckpointProposal, - PeerErrorSeverity, - type ValidationResult, -} from '@aztec/stdlib/p2p'; -import type { TxHash } from '@aztec/stdlib/tx'; - -import { jest } from '@jest/globals'; -import type { MockProxy } from 'jest-mock-extended'; - -export interface ProposalValidatorTestParams { - validatorFactory: ( - epochCache: EpochCacheInterface, - opts: { txsPermitted: boolean; maxTxsPerBlock?: number }, - ) => { validate: (proposal: TProposal) => Promise }; - makeProposal: (options?: any) => Promise; - makeHeader: 
(epochNumber: number | bigint, slotNumber: number | bigint, blockNumber: number | bigint) => any; - getSigner: () => Secp256k1Signer; - getAddress: (signer?: Secp256k1Signer) => EthAddress; - getSlot: (slot: number | bigint) => any; - getTxHashes: (n: number) => TxHash[]; - getTxs: () => any[]; - epochCacheMock: () => MockProxy; -} - -export function sharedProposalValidatorTests( - params: ProposalValidatorTestParams, -) { - const { validatorFactory, makeProposal, makeHeader, getSigner, getAddress, getSlot, getTxHashes, epochCacheMock } = - params; - - describe('shared proposal validation logic', () => { - let epochCache: MockProxy; - let validator: { validate: (proposal: TProposal) => Promise }; - const previousSlot = getSlot(99); - const currentSlot = getSlot(100); - const nextSlot = getSlot(101); - - function mockGetProposer(currentProposer: EthAddress, nextProposer: EthAddress, previousProposer?: EthAddress) { - epochCache.getProposerAttesterAddressInSlot.mockImplementation(slot => { - if (slot === currentSlot) { - return Promise.resolve(currentProposer); - } - if (slot === nextSlot) { - return Promise.resolve(nextProposer); - } - if (slot === previousSlot && previousProposer) { - return Promise.resolve(previousProposer); - } - throw new Error('Unexpected argument'); - }); - } - - beforeEach(() => { - epochCache = epochCacheMock(); - validator = validatorFactory(epochCache, { txsPermitted: true, maxTxsPerBlock: undefined }); - epochCache.getCurrentAndNextSlot.mockReturnValue({ - currentSlot: currentSlot, - nextSlot: nextSlot, - }); - }); - - it('returns high tolerance error if slot number is not current or next slot (outside clock tolerance)', async () => { - const header = makeHeader(1, 99, 99); - const mockProposal = await makeProposal({ blockHeader: header, lastBlockHeader: header }); - - // Mock getEpochAndSlotNow to return time OUTSIDE clock tolerance (1000ms elapsed) - epochCache.getEpochAndSlotNow.mockReturnValue({ - epoch: 1 as any, - slot: currentSlot, 
- ts: 1000n, // slot started at 1000 seconds - nowMs: 1001000n, // 1000ms elapsed, outside 500ms tolerance - }); - - epochCache.getProposerAttesterAddressInSlot.mockResolvedValue(getAddress()); - const result = await validator.validate(mockProposal); - expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.HighToleranceError }); - - // Should not try to resolve proposers if base validation fails - expect(epochCache.getProposerAttesterAddressInSlot).not.toHaveBeenCalled(); - }); - - it('returns ignore if previous slot proposal is within clock tolerance', async () => { - const previousProposer = getSigner(); - const header = makeHeader(1, 99, 99); - const mockProposal = await makeProposal({ - blockHeader: header, - lastBlockHeader: header, - signer: previousProposer, - }); - - // Mock getEpochAndSlotNow to return time WITHIN clock tolerance (100ms elapsed) - epochCache.getEpochAndSlotNow.mockReturnValue({ - epoch: 1 as any, - slot: currentSlot, - ts: 1000n, // slot started at 1000 seconds - nowMs: 1000100n, // 100ms elapsed, within 500ms tolerance - }); - - mockGetProposer(getAddress(), getAddress(), getAddress(previousProposer)); - const result = await validator.validate(mockProposal); - expect(result).toEqual({ result: 'ignore' }); - }); - - it('returns mid tolerance error if proposal has invalid signature', async () => { - const currentProposer = getSigner(); - const header = makeHeader(1, 100, 100); - const mockProposal = await makeProposal({ - blockHeader: header, - lastBlockHeader: header, - signer: currentProposer, - }); - - // Override getSender to return undefined (invalid signature) - jest.spyOn(mockProposal as any, 'getSender').mockReturnValue(undefined); - - mockGetProposer(getAddress(currentProposer), getAddress()); - const result = await validator.validate(mockProposal); - expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.MidToleranceError }); - - // Should not try to resolve proposer if signature is invalid - 
expect(epochCache.getProposerAttesterAddressInSlot).not.toHaveBeenCalled(); - }); - - it('returns mid tolerance error if proposer is not current proposer for current slot', async () => { - const currentProposer = getSigner(); - const nextProposer = getSigner(); - const invalidProposer = getSigner(); - const header = makeHeader(1, 100, 100); - const mockProposal = await makeProposal({ - blockHeader: header, - lastBlockHeader: header, - signer: invalidProposer, - }); - - mockGetProposer(getAddress(currentProposer), getAddress(nextProposer)); - const result = await validator.validate(mockProposal); - expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.MidToleranceError }); - }); - - it('returns mid tolerance error if proposer is not next proposer for next slot', async () => { - const currentProposer = getSigner(); - const nextProposer = getSigner(); - const invalidProposer = getSigner(); - const header = makeHeader(1, 101, 101); - const mockProposal = await makeProposal({ - blockHeader: header, - lastBlockHeader: header, - signer: invalidProposer, - }); - - mockGetProposer(getAddress(currentProposer), getAddress(nextProposer)); - const result = await validator.validate(mockProposal); - expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.MidToleranceError }); - }); - - it('returns mid tolerance error if proposer is current proposer but proposal is for next slot', async () => { - const currentProposer = getSigner(); - const nextProposer = getSigner(); - const header = makeHeader(1, 101, 101); - const mockProposal = await makeProposal({ - blockHeader: header, - lastBlockHeader: header, - signer: currentProposer, - }); - - mockGetProposer(getAddress(currentProposer), getAddress(nextProposer)); - const result = await validator.validate(mockProposal); - expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.MidToleranceError }); - }); - - it('accepts proposal when proposer is undefined (open committee)', async () => { - 
const currentProposer = getSigner(); - const header = makeHeader(1, 100, 100); - const mockProposal = await makeProposal({ - blockHeader: header, - lastBlockHeader: header, - signer: currentProposer, - }); - - epochCache.getProposerAttesterAddressInSlot.mockResolvedValue(undefined); - const result = await validator.validate(mockProposal); - expect(result).toEqual({ result: 'accept' }); - }); - - it('returns low tolerance error when getProposerAttesterAddressInSlot throws NoCommitteeError', async () => { - const currentProposer = getSigner(); - const header = makeHeader(1, 100, 100); - const mockProposal = await makeProposal({ - blockHeader: header, - lastBlockHeader: header, - signer: currentProposer, - }); - - epochCache.getProposerAttesterAddressInSlot.mockRejectedValue(new NoCommitteeError()); - const result = await validator.validate(mockProposal); - expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.LowToleranceError }); - }); - - it('returns undefined if proposal is valid for current slot and proposer', async () => { - const currentProposer = getSigner(); - const nextProposer = getSigner(); - const header = makeHeader(1, 100, 100); - const mockProposal = await makeProposal({ - blockHeader: header, - lastBlockHeader: header, - signer: currentProposer, - }); - - mockGetProposer(getAddress(currentProposer), getAddress(nextProposer)); - const result = await validator.validate(mockProposal); - expect(result).toEqual({ result: 'accept' }); - }); - - it('returns undefined if proposal is valid for next slot and proposer', async () => { - const currentProposer = getSigner(); - const nextProposer = getSigner(); - const header = makeHeader(1, 101, 101); - const mockProposal = await makeProposal({ blockHeader: header, lastBlockHeader: header, signer: nextProposer }); - - mockGetProposer(getAddress(currentProposer), getAddress(nextProposer)); - const result = await validator.validate(mockProposal); - expect(result).toEqual({ result: 'accept' }); - }); 
- - describe('transaction permission validation', () => { - it('returns mid tolerance error if txs not permitted and proposal contains txHashes', async () => { - const currentProposer = getSigner(); - const validatorWithTxsDisabled = validatorFactory(epochCache, { - txsPermitted: false, - maxTxsPerBlock: undefined, - }); - const header = makeHeader(1, 100, 100); - const mockProposal = await makeProposal({ - blockHeader: header, - lastBlockHeader: header, - signer: currentProposer, - txHashes: getTxHashes(2), - }); - - mockGetProposer(getAddress(currentProposer), getAddress()); - const result = await validatorWithTxsDisabled.validate(mockProposal); - expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.MidToleranceError }); - }); - - it('returns undefined if txs not permitted but proposal has no txHashes', async () => { - const currentProposer = getSigner(); - const validatorWithTxsDisabled = validatorFactory(epochCache, { - txsPermitted: false, - maxTxsPerBlock: undefined, - }); - const header = makeHeader(1, 100, 100); - const mockProposal = await makeProposal({ - blockHeader: header, - lastBlockHeader: header, - signer: currentProposer, - txHashes: getTxHashes(0), - }); - - mockGetProposer(getAddress(currentProposer), getAddress()); - const result = await validatorWithTxsDisabled.validate(mockProposal); - expect(result).toEqual({ result: 'accept' }); - }); - - it('returns undefined if txs permitted and proposal contains txHashes', async () => { - const currentProposer = getSigner(); - const header = makeHeader(1, 100, 100); - const mockProposal = await makeProposal({ - blockHeader: header, - lastBlockHeader: header, - signer: currentProposer, - txHashes: getTxHashes(2), - }); - - mockGetProposer(getAddress(currentProposer), getAddress()); - const result = await validator.validate(mockProposal); - expect(result).toEqual({ result: 'accept' }); - }); - }); - - describe('embedded tx validation', () => { - it('returns mid tolerance error if embedded 
txs are not listed in txHashes', async () => { - const currentProposer = getSigner(); - const txHashes = getTxHashes(2); - const header = makeHeader(1, 100, 100); - const mockProposal = await makeProposal({ - blockHeader: header, - lastBlockHeader: header, - signer: currentProposer, - txHashes, - }); - - // Create a fake tx whose hash is NOT in txHashes - const fakeTxHash = getTxHashes(1)[0]; - const fakeTx = { getTxHash: () => fakeTxHash, validateTxHash: () => Promise.resolve(true) }; - Object.defineProperty(mockProposal, 'txs', { get: () => [fakeTx], configurable: true }); - - mockGetProposer(getAddress(currentProposer), getAddress()); - const result = await validator.validate(mockProposal); - expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.MidToleranceError }); - }); - - it('returns low tolerance error if embedded tx has invalid tx hash', async () => { - const currentProposer = getSigner(); - const txHashes = getTxHashes(2); - const header = makeHeader(1, 100, 100); - const mockProposal = await makeProposal({ - blockHeader: header, - lastBlockHeader: header, - signer: currentProposer, - txHashes, - }); - - // Create a fake tx whose hash IS in txHashes but validateTxHash returns false - const fakeTx = { getTxHash: () => txHashes[0], validateTxHash: () => Promise.resolve(false) }; - Object.defineProperty(mockProposal, 'txs', { get: () => [fakeTx], configurable: true }); - - mockGetProposer(getAddress(currentProposer), getAddress()); - const result = await validator.validate(mockProposal); - expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.LowToleranceError }); - }); - }); - - describe('maxTxsPerBlock validation', () => { - it('rejects proposal when txHashes exceed maxTxsPerBlock', async () => { - const validatorWithMaxTxs = validatorFactory(epochCache, { txsPermitted: true, maxTxsPerBlock: 2 }); - const currentProposer = getSigner(); - const header = makeHeader(1, 100, 100); - const mockProposal = await makeProposal({ - 
blockHeader: header, - lastBlockHeader: header, - signer: currentProposer, - txHashes: getTxHashes(3), - }); - - mockGetProposer(getAddress(currentProposer), getAddress()); - const result = await validatorWithMaxTxs.validate(mockProposal); - expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.MidToleranceError }); - }); - - it('accepts proposal when txHashes count equals maxTxsPerBlock', async () => { - const validatorWithMaxTxs = validatorFactory(epochCache, { txsPermitted: true, maxTxsPerBlock: 2 }); - const currentProposer = getSigner(); - const header = makeHeader(1, 100, 100); - const mockProposal = await makeProposal({ - blockHeader: header, - lastBlockHeader: header, - signer: currentProposer, - txHashes: getTxHashes(2), - }); - - mockGetProposer(getAddress(currentProposer), getAddress()); - const result = await validatorWithMaxTxs.validate(mockProposal); - expect(result).toEqual({ result: 'accept' }); - }); - - it('accepts proposal when maxTxsPerBlock is not set (unlimited)', async () => { - // Default validator has no maxTxsPerBlock - const currentProposer = getSigner(); - const header = makeHeader(1, 100, 100); - const mockProposal = await makeProposal({ - blockHeader: header, - lastBlockHeader: header, - signer: currentProposer, - txHashes: getTxHashes(10), - }); - - mockGetProposer(getAddress(currentProposer), getAddress()); - const result = await validator.validate(mockProposal); - expect(result).toEqual({ result: 'accept' }); - }); - }); - }); -} diff --git a/yarn-project/stdlib/src/p2p/checkpoint_proposal.ts b/yarn-project/stdlib/src/p2p/checkpoint_proposal.ts index fff36a91b996..fdf4d679510f 100644 --- a/yarn-project/stdlib/src/p2p/checkpoint_proposal.ts +++ b/yarn-project/stdlib/src/p2p/checkpoint_proposal.ts @@ -101,23 +101,6 @@ export class CheckpointProposal extends Gossipable { return this.checkpointHeader.slotNumber; } - get blockNumber(): BlockNumber { - if (!this.lastBlock) { - throw new Error('Cannot get blockNumber 
without lastBlock'); - } - return this.lastBlock.blockHeader.getBlockNumber(); - } - - /** Convenience getter for txHashes from lastBlock */ - get txHashes(): TxHash[] { - return this.lastBlock?.txHashes ?? []; - } - - /** Convenience getter for txs from lastBlock */ - get txs(): Tx[] | undefined { - return this.lastBlock?.signedTxs?.txs; - } - /** * Extract a BlockProposal from the last block info. * Uses inHash from checkpointHeader.contentCommitment.inHash diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index 99d4740b78c7..314969b4ce88 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -150,16 +150,10 @@ export class ValidationService { ); // TODO(spy/ha): Use checkpointNumber instead of blockNumber once CheckpointHeader includes it. - // Currently using lastBlock.blockNumber as a proxy for checkpoint identification in HA signing. + // CheckpointProposalCore doesn't have lastBlock info, so use 0 as a proxy. // blockNumber is NOT used for the primary key so it's safe to use here. // See CheckpointHeader TODO and SigningContext types documentation. 
- let blockNumber: BlockNumber; - try { - blockNumber = proposal.blockNumber; - } catch { - // Checkpoint proposal may not have lastBlock, use 0 as fallback - blockNumber = BlockNumber(0); - } + const blockNumber = BlockNumber(0); const context: SigningContext = { slot: proposal.slotNumber, blockNumber, diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 76f97ed0d86b..f60f2277eac7 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -520,11 +520,9 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) slotNumber, archive: proposal.archive.toString(), proposer: proposer.toString(), - txCount: proposal.txHashes.length, }; this.log.info(`Received checkpoint proposal for slot ${slotNumber}`, { ...proposalInfo, - txHashes: proposal.txHashes.map(t => t.toString()), fishermanMode: this.config.fishermanMode || false, }); From 8f4c48d87376035ae6f4d9d54ba8bced27b4ec7e Mon Sep 17 00:00:00 2001 From: Michal Rzeszutko Date: Wed, 4 Mar 2026 18:57:55 +0100 Subject: [PATCH 21/37] feat: additional validation in public setup allowlist (onlySelf + null msg sender) (#21122) ## Summary Adds two new protocol-level validations to the `PhasesTxValidator` for setup-phase public calls, providing defense-in-depth beyond contract bytecode assertions: - **`onlySelf` validation (A-619)**: Setup functions flagged as `onlySelf` (internal functions like `_set_authorized`, `_increase_public_balance`) are rejected if `msgSender != contractAddress`. This enforces at the sequencer/P2P level that only the contract itself can enqueue calls to its internal functions. - **`rejectNullMsgSender` validation (A-618)**: AuthRegistry's `set_authorized` and `_set_authorized` entries are rejected if `msgSender` is `NULL_MSG_SENDER_CONTRACT_ADDRESS` (the `-1` / max Field sentinel). 
This prevents malicious transactions from storing authwit approvals under the null sender address, which could be exploited via `consume`. Both flags are opt-in on `AllowedElement` entries, so only explicitly annotated functions are affected. ## Changes - **`stdlib/src/interfaces/allowed_element.ts`**: Added `onlySelf` and `rejectNullMsgSender` optional boolean fields to `AllowedElement` types and Zod schema. - **`stdlib/src/tx/validator/error_texts.ts`**: Added `TX_ERROR_SETUP_ONLY_SELF_WRONG_SENDER` and `TX_ERROR_SETUP_NULL_MSG_SENDER` error constants. - **`p2p/.../phases_validator.ts`**: Added checks for both flags in both address-based and class-based allow list matching branches. - **`p2p/.../allowed_public_setup.ts`**: Annotated AuthRegistry entries with `rejectNullMsgSender: true`, and `_set_authorized`/`_increase_public_balance`/Token `_increase_public_balance` with `onlySelf: true`. - **`p2p/.../phases_validator.test.ts`**: Added two new test describe blocks (`onlySelf validation` with 5 tests, `rejectNullMsgSender validation` with 3 tests) covering both address-based and class-based entries. 
Fixes A-618 Fixes A-619 --- .../tx_validator/allowed_public_setup.ts | 5 + .../tx_validator/phases_validator.test.ts | 184 +++++++++++++++++- .../tx_validator/phases_validator.ts | 22 +++ .../stdlib/src/interfaces/allowed_element.ts | 28 ++- .../stdlib/src/tx/validator/error_texts.ts | 2 + 5 files changed, 236 insertions(+), 5 deletions(-) diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/allowed_public_setup.ts b/yarn-project/p2p/src/msg_validators/tx_validator/allowed_public_setup.ts index b18fa82c4853..6f536c75f09d 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/allowed_public_setup.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/allowed_public_setup.ts @@ -22,21 +22,26 @@ export async function getDefaultAllowedSetupFunctions(): Promise { expect(contractDataSource.getContract).not.toHaveBeenCalled(); }); + + describe('onlySelf validation', () => { + let allowedOnlySelfSelector: FunctionSelector; + let allowedOnlySelfContract: AztecAddress; + let allowedOnlySelfClass: Fr; + + beforeEach(() => { + allowedOnlySelfSelector = makeSelector(10); + allowedOnlySelfContract = makeAztecAddress(); + allowedOnlySelfClass = Fr.random(); + + txValidator = new PhasesTxValidator( + contractDataSource, + [ + { + address: allowedOnlySelfContract, + selector: allowedOnlySelfSelector, + onlySelf: true, + }, + { + classId: allowedOnlySelfClass, + selector: allowedOnlySelfSelector, + onlySelf: true, + }, + { + address: allowedContract, + selector: allowedSetupSelector1, + }, + ], + timestamp, + ); + }); + + it('allows onlySelf address entry when msgSender equals contractAddress', async () => { + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); + await patchNonRevertibleFn(tx, 0, { + address: allowedOnlySelfContract, + selector: allowedOnlySelfSelector, + msgSender: allowedOnlySelfContract, + }); + + await expectValid(tx); + }); + + it('rejects onlySelf address entry when msgSender differs from contractAddress', async () => 
{ + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); + await patchNonRevertibleFn(tx, 0, { + address: allowedOnlySelfContract, + selector: allowedOnlySelfSelector, + msgSender: makeAztecAddress(999), + }); + + await expectInvalid(tx, TX_ERROR_SETUP_ONLY_SELF_WRONG_SENDER); + }); + + it('allows onlySelf class entry when msgSender equals contractAddress', async () => { + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); + const address = await patchNonRevertibleFn(tx, 0, { + selector: allowedOnlySelfSelector, + msgSender: undefined, // will be patched below + }); + + // Patch msgSender to equal contractAddress + tx.data.forPublic!.nonRevertibleAccumulatedData.publicCallRequests[0].msgSender = address; + + contractDataSource.getContract.mockImplementationOnce((contractAddress, atTimestamp) => { + if (timestamp !== atTimestamp) { + throw new Error('Unexpected timestamp'); + } + if (address.equals(contractAddress)) { + return Promise.resolve({ + currentContractClassId: allowedOnlySelfClass, + originalContractClassId: Fr.random(), + } as any); + } + return Promise.resolve(undefined); + }); + + await expectValid(tx); + }); + + it('rejects onlySelf class entry when msgSender differs from contractAddress', async () => { + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); + const address = await patchNonRevertibleFn(tx, 0, { + selector: allowedOnlySelfSelector, + msgSender: makeAztecAddress(), + }); + + contractDataSource.getContract.mockImplementationOnce((contractAddress, atTimestamp) => { + if (timestamp !== atTimestamp) { + throw new Error('Unexpected timestamp'); + } + if (address.equals(contractAddress)) { + return Promise.resolve({ + currentContractClassId: allowedOnlySelfClass, + originalContractClassId: Fr.random(), + } as any); + } + return Promise.resolve(undefined); + }); + + await expectInvalid(tx, TX_ERROR_SETUP_ONLY_SELF_WRONG_SENDER); + }); + + it('allows non-onlySelf entry with 
different msgSender', async () => { + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); + await patchNonRevertibleFn(tx, 0, { + address: allowedContract, + selector: allowedSetupSelector1, + msgSender: makeAztecAddress(), + }); + + await expectValid(tx); + }); + }); + + describe('rejectNullMsgSender validation', () => { + const nullMsgSender = AztecAddress.fromBigInt(NULL_MSG_SENDER_CONTRACT_ADDRESS); + let rejectNullContract: AztecAddress; + let rejectNullSelector: FunctionSelector; + let noRejectNullContract: AztecAddress; + let noRejectNullSelector: FunctionSelector; + + beforeEach(() => { + rejectNullContract = makeAztecAddress(50); + rejectNullSelector = makeSelector(50); + noRejectNullContract = makeAztecAddress(51); + noRejectNullSelector = makeSelector(51); + + txValidator = new PhasesTxValidator( + contractDataSource, + [ + { + address: rejectNullContract, + selector: rejectNullSelector, + rejectNullMsgSender: true, + }, + { + address: noRejectNullContract, + selector: noRejectNullSelector, + }, + ], + timestamp, + ); + }); + + it('rejects when msgSender is NULL_MSG_SENDER_CONTRACT_ADDRESS', async () => { + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); + await patchNonRevertibleFn(tx, 0, { + address: rejectNullContract, + selector: rejectNullSelector, + msgSender: nullMsgSender, + }); + + await expectInvalid(tx, TX_ERROR_SETUP_NULL_MSG_SENDER); + }); + + it('allows when msgSender is a normal address', async () => { + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); + await patchNonRevertibleFn(tx, 0, { + address: rejectNullContract, + selector: rejectNullSelector, + msgSender: makeAztecAddress(100), + }); + + await expectValid(tx); + }); + + it('allows null msgSender on entries without the flag', async () => { + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); + await patchNonRevertibleFn(tx, 0, { + address: noRejectNullContract, + selector: 
noRejectNullSelector, + msgSender: nullMsgSender, + }); + + await expectValid(tx); + }); + }); }); diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/phases_validator.ts b/yarn-project/p2p/src/msg_validators/tx_validator/phases_validator.ts index 3e8c0f9b3313..5a3fcf018b43 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/phases_validator.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/phases_validator.ts @@ -1,5 +1,7 @@ +import { NULL_MSG_SENDER_CONTRACT_ADDRESS } from '@aztec/constants'; import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log'; import { PublicContractsDB, getCallRequestsWithCalldataByPhase } from '@aztec/simulator/server'; +import { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { ContractDataSource } from '@aztec/stdlib/contract'; import type { AllowedElement } from '@aztec/stdlib/interfaces/server'; import { @@ -7,6 +9,8 @@ import { TX_ERROR_DURING_VALIDATION, TX_ERROR_SETUP_FUNCTION_NOT_ALLOWED, TX_ERROR_SETUP_FUNCTION_UNKNOWN_CONTRACT, + TX_ERROR_SETUP_NULL_MSG_SENDER, + TX_ERROR_SETUP_ONLY_SELF_WRONG_SENDER, Tx, TxExecutionPhase, type TxValidationResult, @@ -84,6 +88,15 @@ export class PhasesTxValidator implements TxValidator { for (const entry of allowList) { if ('address' in entry) { if (contractAddress.equals(entry.address) && entry.selector.equals(functionSelector)) { + if (entry.onlySelf && !publicCall.request.msgSender.equals(contractAddress)) { + return TX_ERROR_SETUP_ONLY_SELF_WRONG_SENDER; + } + if ( + entry.rejectNullMsgSender && + publicCall.request.msgSender.equals(AztecAddress.fromBigInt(NULL_MSG_SENDER_CONTRACT_ADDRESS)) + ) { + return TX_ERROR_SETUP_NULL_MSG_SENDER; + } return undefined; } } @@ -105,6 +118,15 @@ export class PhasesTxValidator implements TxValidator { } if (contractClassId.value === entry.classId.toString() && entry.selector.equals(functionSelector)) { + if (entry.onlySelf && !publicCall.request.msgSender.equals(contractAddress)) { + 
return TX_ERROR_SETUP_ONLY_SELF_WRONG_SENDER; + } + if ( + entry.rejectNullMsgSender && + publicCall.request.msgSender.equals(AztecAddress.fromBigInt(NULL_MSG_SENDER_CONTRACT_ADDRESS)) + ) { + return TX_ERROR_SETUP_NULL_MSG_SENDER; + } return undefined; } } diff --git a/yarn-project/stdlib/src/interfaces/allowed_element.ts b/yarn-project/stdlib/src/interfaces/allowed_element.ts index 807f21e5286f..4c7d351f63ed 100644 --- a/yarn-project/stdlib/src/interfaces/allowed_element.ts +++ b/yarn-project/stdlib/src/interfaces/allowed_element.ts @@ -6,14 +6,34 @@ import type { FunctionSelector } from '../abi/function_selector.js'; import type { AztecAddress } from '../aztec-address/index.js'; import { schemas, zodFor } from '../schemas/index.js'; -type AllowedInstanceFunction = { address: AztecAddress; selector: FunctionSelector }; -type AllowedClassFunction = { classId: Fr; selector: FunctionSelector }; +type AllowedInstanceFunction = { + address: AztecAddress; + selector: FunctionSelector; + onlySelf?: boolean; + rejectNullMsgSender?: boolean; +}; +type AllowedClassFunction = { + classId: Fr; + selector: FunctionSelector; + onlySelf?: boolean; + rejectNullMsgSender?: boolean; +}; export type AllowedElement = AllowedInstanceFunction | AllowedClassFunction; export const AllowedElementSchema = zodFor()( z.union([ - z.object({ address: schemas.AztecAddress, selector: schemas.FunctionSelector }), - z.object({ classId: schemas.Fr, selector: schemas.FunctionSelector }), + z.object({ + address: schemas.AztecAddress, + selector: schemas.FunctionSelector, + onlySelf: z.boolean().optional(), + rejectNullMsgSender: z.boolean().optional(), + }), + z.object({ + classId: schemas.Fr, + selector: schemas.FunctionSelector, + onlySelf: z.boolean().optional(), + rejectNullMsgSender: z.boolean().optional(), + }), ]), ); diff --git a/yarn-project/stdlib/src/tx/validator/error_texts.ts b/yarn-project/stdlib/src/tx/validator/error_texts.ts index cf737a2160c4..c9214299d46b 100644 --- 
a/yarn-project/stdlib/src/tx/validator/error_texts.ts +++ b/yarn-project/stdlib/src/tx/validator/error_texts.ts @@ -7,6 +7,8 @@ export const TX_ERROR_GAS_LIMIT_TOO_HIGH = 'Gas limit is higher than the amount // Phases export const TX_ERROR_SETUP_FUNCTION_NOT_ALLOWED = 'Setup function not on allow list'; export const TX_ERROR_SETUP_FUNCTION_UNKNOWN_CONTRACT = 'Setup function targets unknown contract'; +export const TX_ERROR_SETUP_ONLY_SELF_WRONG_SENDER = 'Setup only_self function called with incorrect msg_sender'; +export const TX_ERROR_SETUP_NULL_MSG_SENDER = 'Setup function called with null msg sender'; // Nullifiers export const TX_ERROR_DUPLICATE_NULLIFIER_IN_TX = 'Duplicate nullifier in tx'; From 1d2bbde35eedb7d9cabb37dad41e56930c4d3890 Mon Sep 17 00:00:00 2001 From: danielntmd <162406516+danielntmd@users.noreply.github.com> Date: Wed, 4 Mar 2026 13:13:03 -0500 Subject: [PATCH 22/37] fix: (A-591) aztecProofSubmissionEpochs incorrectly named as aztecProofSubmissionWindow (#21108) - Incorrect config property was silently dropped due to as AztecNodeConfig cast. - Fixes other tests (e.g. gossip) reliant on this config setup. 
Co-authored-by: danielntmd --- yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts | 2 +- yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts index 2872ae1ab0c9..7e3e094058f9 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts @@ -213,7 +213,7 @@ describe('e2e_p2p_network', () => { return provenBlock > 0; }, 'proven block', - SHORTENED_BLOCK_TIME_CONFIG_NO_PRUNES.aztecProofSubmissionWindow * AZTEC_EPOCH_DURATION * AZTEC_SLOT_DURATION, + SHORTENED_BLOCK_TIME_CONFIG_NO_PRUNES.aztecProofSubmissionEpochs * AZTEC_EPOCH_DURATION * AZTEC_SLOT_DURATION, ); }); }); diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index 6e90af2fad87..62f1cdfb27a7 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -59,7 +59,7 @@ export const WAIT_FOR_TX_TIMEOUT = l1ContractsConfig.aztecSlotDuration * 3; export const SHORTENED_BLOCK_TIME_CONFIG_NO_PRUNES = { aztecSlotDuration: 12, ethereumSlotDuration: 4, - aztecProofSubmissionWindow: 640, + aztecProofSubmissionEpochs: 640, }; export class P2PNetworkTest { From cfc9780b6be6853ee8ce14e6c7baa244dc850899 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Wed, 4 Mar 2026 15:20:23 -0300 Subject: [PATCH 23/37] refactor(sequencer): rename SEQ_GAS_PER_BLOCK_ALLOCATION_MULTIPLIER to SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER (#21125) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Renames `SEQ_GAS_PER_BLOCK_ALLOCATION_MULTIPLIER` → `SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER` (and the corresponding `gasPerBlockAllocationMultiplier` config field to `perBlockAllocationMultiplier`), since the multiplier now 
applies to per-block tx limits as well, not just gas. Co-authored-by: Claude Sonnet 4.6 --- docs/docs-operate/operators/reference/changelog/v4.md | 2 +- yarn-project/foundation/src/config/env_var.ts | 2 +- .../sequencer-client/src/client/sequencer-client.ts | 2 +- yarn-project/sequencer-client/src/config.ts | 8 ++++---- yarn-project/stdlib/src/interfaces/configs.ts | 6 +++--- yarn-project/validator-client/README.md | 2 +- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/docs/docs-operate/operators/reference/changelog/v4.md b/docs/docs-operate/operators/reference/changelog/v4.md index 599c58e8cfa6..83b29d4758a6 100644 --- a/docs/docs-operate/operators/reference/changelog/v4.md +++ b/docs/docs-operate/operators/reference/changelog/v4.md @@ -90,7 +90,7 @@ The byte-based block size limit has been removed and replaced with field-based b **New (proposer):** ```bash ---gasPerBlockAllocationMultiplier ($SEQ_GAS_PER_BLOCK_ALLOCATION_MULTIPLIER) +--perBlockAllocationMultiplier ($SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER) --maxTxsPerCheckpoint ($SEQ_MAX_TX_PER_CHECKPOINT) ``` diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 07b60428cb17..942316271146 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -207,7 +207,7 @@ export type EnvVar = | 'SEQ_PUBLISH_TXS_WITH_PROPOSALS' | 'SEQ_MAX_DA_BLOCK_GAS' | 'SEQ_MAX_L2_BLOCK_GAS' - | 'SEQ_GAS_PER_BLOCK_ALLOCATION_MULTIPLIER' + | 'SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER' | 'SEQ_PUBLISHER_PRIVATE_KEY' | 'SEQ_PUBLISHER_PRIVATE_KEYS' | 'SEQ_PUBLISHER_ADDRESSES' diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index 613c5d172219..22e1be967576 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -267,7 +267,7 @@ export function 
computeBlockLimits( enforce: config.enforceTimeTable ?? DefaultSequencerConfig.enforceTimeTable, }).maxNumberOfBlocks; - const multiplier = config.gasPerBlockAllocationMultiplier ?? DefaultSequencerConfig.gasPerBlockAllocationMultiplier; + const multiplier = config.perBlockAllocationMultiplier ?? DefaultSequencerConfig.perBlockAllocationMultiplier; // Compute maxL2BlockGas let maxL2BlockGas: number; diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index f020431f90a3..117839911491 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -40,7 +40,7 @@ export const DefaultSequencerConfig = { minTxsPerBlock: 1, buildCheckpointIfEmpty: false, publishTxsWithProposals: false, - gasPerBlockAllocationMultiplier: 2, + perBlockAllocationMultiplier: 2, enforceTimeTable: true, attestationPropagationTime: DEFAULT_P2P_PROPAGATION_TIME, secondsBeforeInvalidatingBlockAsCommitteeMember: 144, // 12 L1 blocks @@ -105,12 +105,12 @@ export const sequencerConfigMappings: ConfigMappingsType = { description: 'The maximum DA block gas.', parseEnv: (val: string) => (val ? parseInt(val, 10) : undefined), }, - gasPerBlockAllocationMultiplier: { - env: 'SEQ_GAS_PER_BLOCK_ALLOCATION_MULTIPLIER', + perBlockAllocationMultiplier: { + env: 'SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER', description: 'Per-block gas budget multiplier for both L2 and DA gas. Budget per block is (checkpointLimit / maxBlocks) * multiplier.' 
+ ' Values greater than one allow early blocks to use more than their even share, relying on checkpoint-level capping for later blocks.', - ...numberConfigHelper(DefaultSequencerConfig.gasPerBlockAllocationMultiplier), + ...numberConfigHelper(DefaultSequencerConfig.perBlockAllocationMultiplier), }, coinbase: { env: 'COINBASE', diff --git a/yarn-project/stdlib/src/interfaces/configs.ts b/yarn-project/stdlib/src/interfaces/configs.ts index a290e0c69b0d..a6009002575f 100644 --- a/yarn-project/stdlib/src/interfaces/configs.ts +++ b/yarn-project/stdlib/src/interfaces/configs.ts @@ -26,7 +26,7 @@ export interface SequencerConfig { /** The maximum DA block gas. */ maxDABlockGas?: number; /** Per-block gas budget multiplier for both L2 and DA gas. Budget = (checkpointLimit / maxBlocks) * multiplier. */ - gasPerBlockAllocationMultiplier?: number; + perBlockAllocationMultiplier?: number; /** Recipient of block reward. */ coinbase?: EthAddress; /** Address to receive fees. */ @@ -93,7 +93,7 @@ export const SequencerConfigSchema = zodFor()( maxL2BlockGas: z.number().optional(), publishTxsWithProposals: z.boolean().optional(), maxDABlockGas: z.number().optional(), - gasPerBlockAllocationMultiplier: z.number().optional(), + perBlockAllocationMultiplier: z.number().optional(), coinbase: schemas.EthAddress.optional(), feeRecipient: schemas.AztecAddress.optional(), acvmWorkingDirectory: z.string().optional(), @@ -142,7 +142,7 @@ type SequencerConfigOptionalKeys = | 'maxTxsPerCheckpoint' | 'maxL2BlockGas' | 'maxDABlockGas' - | 'gasPerBlockAllocationMultiplier'; + | 'perBlockAllocationMultiplier'; export type ResolvedSequencerConfig = Prettify< Required> & Pick diff --git a/yarn-project/validator-client/README.md b/yarn-project/validator-client/README.md index 9ae98004e384..4c475117d3d1 100644 --- a/yarn-project/validator-client/README.md +++ b/yarn-project/validator-client/README.md @@ -259,7 +259,7 @@ Per-block budgets prevent one block from consuming the entire checkpoint budget. 
| `SEQ_MAX_DA_BLOCK_GAS` | *auto* | Per-block DA gas. Auto-derived from checkpoint DA limit / maxBlocks * multiplier. | | `SEQ_MAX_TX_PER_BLOCK` | *none* | Per-block tx count. If `SEQ_MAX_TX_PER_CHECKPOINT` is set and per-block is not, derived as `ceil(checkpointLimit / maxBlocks * multiplier)`. | | `SEQ_MAX_TX_PER_CHECKPOINT` | *none* | Total txs across all blocks in a checkpoint. When set, per-block tx limit is derived from it (unless explicitly overridden) and checkpoint-level capping is enforced. | -| `SEQ_GAS_PER_BLOCK_ALLOCATION_MULTIPLIER` | 2 | Multiplier for per-block budget computation. | +| `SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER` | 2 | Multiplier for per-block budget computation. | | `VALIDATOR_MAX_L2_BLOCK_GAS` | *none* | Per-block L2 gas limit for validation. Proposals exceeding this are rejected. | | `VALIDATOR_MAX_DA_BLOCK_GAS` | *none* | Per-block DA gas limit for validation. Proposals exceeding this are rejected. | | `VALIDATOR_MAX_TX_PER_BLOCK` | *none* | Per-block tx count limit for validation. Proposals exceeding this are rejected. | From e0006816ed1306708ce30608dbbfdcf204c8baaa Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Wed, 4 Mar 2026 13:56:12 -0500 Subject: [PATCH 24/37] fix: unbound variable in check_doc_references.sh with set -u (#21126) ## Summary - Fixes CI failure in merge-train/spartan caused by `FILE_TO_DOCS_MAP[$ref_file]: unbound variable` error - The associative array is accessed before any value is set for a key, which triggers an error under `set -u` (bash strict mode) - Uses `${var:-}` default-value syntax to safely handle unset keys ## Root Cause `check_doc_references.sh` line 235 tests `${FILE_TO_DOCS_MAP[$ref_file]}` with `-n`, but under `set -u`, accessing an unset associative array key is an error. Adding `:-` provides a default empty value. 
CI log: http://ci.aztec-labs.com/1772648512656 ClaudeBox log: http://ci.aztec-labs.com/fb4bc894e7d24f61-1 --- docs/scripts/check_doc_references.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/scripts/check_doc_references.sh b/docs/scripts/check_doc_references.sh index cf69741956e1..036ab53d4309 100755 --- a/docs/scripts/check_doc_references.sh +++ b/docs/scripts/check_doc_references.sh @@ -232,7 +232,7 @@ while IFS= read -r ref_file; do while IFS='|' read -r src_file doc_file; do if [[ "$src_file" == "$ref_file" ]]; then # Store in associative array (append to existing value if key exists) - if [[ -n "${FILE_TO_DOCS_MAP[$ref_file]}" ]]; then + if [[ -n "${FILE_TO_DOCS_MAP[$ref_file]:-}" ]]; then FILE_TO_DOCS_MAP[$ref_file]="${FILE_TO_DOCS_MAP[$ref_file]}|${doc_file}" else FILE_TO_DOCS_MAP[$ref_file]="$doc_file" From b7d9cd462269fe61bdc985a99abca93ad07374fc Mon Sep 17 00:00:00 2001 From: Michal Rzeszutko Date: Wed, 4 Mar 2026 22:19:45 +0100 Subject: [PATCH 25/37] feat: calldata length validation of public setup function allowlist (#21139) ## Summary - Add calldata length validation to the public setup function allowlist in `PhasesTxValidator`, preventing setup calls with malformed arguments from being accepted - Extend `AllowedElement` with an optional `calldataLength` field and compute expected lengths dynamically from contract artifacts (`AuthRegistry`, `FeeJuice`, `Token`) - Reject setup calls where calldata length doesn't match the expected length for the matched allowlist entry, returning a new `TX_ERROR_SETUP_WRONG_CALLDATA_LENGTH` error ## Details Even when a setup function call matches the allowlist by address/class and selector, it can still revert if the arguments are malformed. Since Aztec's ABI has no variable-size inputs, validating that the calldata length matches the expected length for a given selector is sufficient to guarantee the arguments are deserializable. 
**AllowedElement type** (`stdlib`): Added optional `calldataLength?: number` to both `AllowedInstanceFunction` and `AllowedClassFunction`, plus corresponding Zod schema updates. **PhasesTxValidator** (`p2p`): After matching an entry by address or class+selector, checks `entry.calldataLength` against `publicCall.calldata.length` before proceeding to `onlySelf`/`rejectNullMsgSender` checks. When `calldataLength` is not set, any length is accepted (backwards compatible). **Default allowlist** (`p2p`): Uses `getFunctionArtifactByName` + `countArgumentsSize` from `stdlib/abi` to compute expected calldata lengths from `AuthRegistryArtifact`, `FeeJuiceArtifact`, and `TokenContractArtifact`. Fixes A-612 --- .../tx_validator/allowed_public_setup.ts | 22 +++- .../tx_validator/phases_validator.test.ts | 109 ++++++++++++++++++ .../tx_validator/phases_validator.ts | 7 ++ .../stdlib/src/interfaces/allowed_element.ts | 4 + .../stdlib/src/tx/validator/error_texts.ts | 1 + 5 files changed, 142 insertions(+), 1 deletion(-) diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/allowed_public_setup.ts b/yarn-project/p2p/src/msg_validators/tx_validator/allowed_public_setup.ts index 6f536c75f09d..6e78567c039c 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/allowed_public_setup.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/allowed_public_setup.ts @@ -1,9 +1,24 @@ import { TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; import { ProtocolContractAddress } from '@aztec/protocol-contracts'; -import { FunctionSelector } from '@aztec/stdlib/abi'; +import { AuthRegistryArtifact } from '@aztec/protocol-contracts/auth-registry'; +import { FeeJuiceArtifact } from '@aztec/protocol-contracts/fee-juice'; +import { FunctionSelector, countArgumentsSize } from '@aztec/stdlib/abi'; +import type { ContractArtifact, FunctionAbi } from '@aztec/stdlib/abi'; import { getContractClassFromArtifact } from '@aztec/stdlib/contract'; import type { AllowedElement } 
from '@aztec/stdlib/interfaces/server'; +/** Returns the expected calldata length for a function: 1 (selector) + arguments size. */ +function getCalldataLength(artifact: ContractArtifact, functionName: string): number { + const allFunctions: FunctionAbi[] = (artifact.functions as FunctionAbi[]).concat( + artifact.nonDispatchPublicFunctions || [], + ); + const fn = allFunctions.find(f => f.name === functionName); + if (!fn) { + throw new Error(`Unknown function ${functionName} in artifact ${artifact.name}`); + } + return 1 + countArgumentsSize(fn); +} + let defaultAllowedSetupFunctions: AllowedElement[] | undefined; /** Returns the default list of functions allowed to run in the setup phase of a transaction. */ @@ -22,6 +37,7 @@ export async function getDefaultAllowedSetupFunctions(): Promise { }); }); + describe('calldataLength validation', () => { + const expectedLength = 4; // 1 selector + 3 args + let calldataContract: AztecAddress; + let calldataSelector: FunctionSelector; + let calldataClassId: Fr; + + beforeEach(() => { + calldataContract = makeAztecAddress(70); + calldataSelector = makeSelector(70); + calldataClassId = Fr.random(); + + txValidator = new PhasesTxValidator( + contractDataSource, + [ + { + address: calldataContract, + selector: calldataSelector, + calldataLength: expectedLength, + }, + { + classId: calldataClassId, + selector: calldataSelector, + calldataLength: expectedLength, + }, + ], + timestamp, + ); + }); + + it('allows address entry with correct calldata length', async () => { + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); + await patchNonRevertibleFn(tx, 0, { + address: calldataContract, + selector: calldataSelector, + args: [Fr.random(), Fr.random(), Fr.random()], + }); + + await expectValid(tx); + }); + + it('rejects address entry with too short calldata', async () => { + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); + await patchNonRevertibleFn(tx, 0, { + address: 
calldataContract, + selector: calldataSelector, + args: [Fr.random()], + }); + + await expectInvalid(tx, TX_ERROR_SETUP_WRONG_CALLDATA_LENGTH); + }); + + it('rejects address entry with too long calldata', async () => { + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); + await patchNonRevertibleFn(tx, 0, { + address: calldataContract, + selector: calldataSelector, + args: [Fr.random(), Fr.random(), Fr.random(), Fr.random(), Fr.random()], + }); + + await expectInvalid(tx, TX_ERROR_SETUP_WRONG_CALLDATA_LENGTH); + }); + + it('rejects class entry with wrong calldata length', async () => { + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); + const address = await patchNonRevertibleFn(tx, 0, { + selector: calldataSelector, + args: [Fr.random()], + }); + + contractDataSource.getContract.mockImplementationOnce((contractAddress, atTimestamp) => { + if (timestamp !== atTimestamp) { + throw new Error('Unexpected timestamp'); + } + if (address.equals(contractAddress)) { + return Promise.resolve({ + currentContractClassId: calldataClassId, + originalContractClassId: Fr.random(), + } as any); + } + return Promise.resolve(undefined); + }); + + await expectInvalid(tx, TX_ERROR_SETUP_WRONG_CALLDATA_LENGTH); + }); + + it('allows any calldata length when calldataLength is not set', async () => { + txValidator = new PhasesTxValidator( + contractDataSource, + [ + { + address: calldataContract, + selector: calldataSelector, + }, + ], + timestamp, + ); + + const tx = await mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); + await patchNonRevertibleFn(tx, 0, { + address: calldataContract, + selector: calldataSelector, + args: [Fr.random(), Fr.random(), Fr.random(), Fr.random(), Fr.random(), Fr.random()], + }); + + await expectValid(tx); + }); + }); + describe('rejectNullMsgSender validation', () => { const nullMsgSender = AztecAddress.fromBigInt(NULL_MSG_SENDER_CONTRACT_ADDRESS); let rejectNullContract: AztecAddress; diff 
--git a/yarn-project/p2p/src/msg_validators/tx_validator/phases_validator.ts b/yarn-project/p2p/src/msg_validators/tx_validator/phases_validator.ts index 5a3fcf018b43..9de8f7ef19e2 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/phases_validator.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/phases_validator.ts @@ -11,6 +11,7 @@ import { TX_ERROR_SETUP_FUNCTION_UNKNOWN_CONTRACT, TX_ERROR_SETUP_NULL_MSG_SENDER, TX_ERROR_SETUP_ONLY_SELF_WRONG_SENDER, + TX_ERROR_SETUP_WRONG_CALLDATA_LENGTH, Tx, TxExecutionPhase, type TxValidationResult, @@ -88,6 +89,9 @@ export class PhasesTxValidator implements TxValidator { for (const entry of allowList) { if ('address' in entry) { if (contractAddress.equals(entry.address) && entry.selector.equals(functionSelector)) { + if (entry.calldataLength !== undefined && publicCall.calldata.length !== entry.calldataLength) { + return TX_ERROR_SETUP_WRONG_CALLDATA_LENGTH; + } if (entry.onlySelf && !publicCall.request.msgSender.equals(contractAddress)) { return TX_ERROR_SETUP_ONLY_SELF_WRONG_SENDER; } @@ -118,6 +122,9 @@ export class PhasesTxValidator implements TxValidator { } if (contractClassId.value === entry.classId.toString() && entry.selector.equals(functionSelector)) { + if (entry.calldataLength !== undefined && publicCall.calldata.length !== entry.calldataLength) { + return TX_ERROR_SETUP_WRONG_CALLDATA_LENGTH; + } if (entry.onlySelf && !publicCall.request.msgSender.equals(contractAddress)) { return TX_ERROR_SETUP_ONLY_SELF_WRONG_SENDER; } diff --git a/yarn-project/stdlib/src/interfaces/allowed_element.ts b/yarn-project/stdlib/src/interfaces/allowed_element.ts index 4c7d351f63ed..efc1a6b467bf 100644 --- a/yarn-project/stdlib/src/interfaces/allowed_element.ts +++ b/yarn-project/stdlib/src/interfaces/allowed_element.ts @@ -11,12 +11,14 @@ type AllowedInstanceFunction = { selector: FunctionSelector; onlySelf?: boolean; rejectNullMsgSender?: boolean; + calldataLength?: number; }; type AllowedClassFunction = { 
classId: Fr; selector: FunctionSelector; onlySelf?: boolean; rejectNullMsgSender?: boolean; + calldataLength?: number; }; export type AllowedElement = AllowedInstanceFunction | AllowedClassFunction; @@ -28,12 +30,14 @@ export const AllowedElementSchema = zodFor()( selector: schemas.FunctionSelector, onlySelf: z.boolean().optional(), rejectNullMsgSender: z.boolean().optional(), + calldataLength: z.number().optional(), }), z.object({ classId: schemas.Fr, selector: schemas.FunctionSelector, onlySelf: z.boolean().optional(), rejectNullMsgSender: z.boolean().optional(), + calldataLength: z.number().optional(), }), ]), ); diff --git a/yarn-project/stdlib/src/tx/validator/error_texts.ts b/yarn-project/stdlib/src/tx/validator/error_texts.ts index c9214299d46b..6a8326f032a8 100644 --- a/yarn-project/stdlib/src/tx/validator/error_texts.ts +++ b/yarn-project/stdlib/src/tx/validator/error_texts.ts @@ -9,6 +9,7 @@ export const TX_ERROR_SETUP_FUNCTION_NOT_ALLOWED = 'Setup function not on allow export const TX_ERROR_SETUP_FUNCTION_UNKNOWN_CONTRACT = 'Setup function targets unknown contract'; export const TX_ERROR_SETUP_ONLY_SELF_WRONG_SENDER = 'Setup only_self function called with incorrect msg_sender'; export const TX_ERROR_SETUP_NULL_MSG_SENDER = 'Setup function called with null msg sender'; +export const TX_ERROR_SETUP_WRONG_CALLDATA_LENGTH = 'Setup function called with wrong calldata length'; // Nullifiers export const TX_ERROR_DUPLICATE_NULLIFIER_IN_TX = 'Duplicate nullifier in tx'; From 55ddc154f12214d2b5b724bde4e0ff7ecb46b572 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Wed, 4 Mar 2026 22:50:37 +0000 Subject: [PATCH 26/37] fix: include mismatched values in tx metadata validation errors --- .../aztec-node/src/aztec-node/server.test.ts | 10 ++++++++-- .../tx_validator/metadata_validator.test.ts | 5 ++++- .../tx_validator/metadata_validator.ts | 16 ++++++++++++---- 3 files changed, 24 insertions(+), 7 deletions(-) diff --git 
a/yarn-project/aztec-node/src/aztec-node/server.test.ts b/yarn-project/aztec-node/src/aztec-node/server.test.ts index 17d6f3f51928..5e4f6df83949 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.test.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.test.ts @@ -238,7 +238,10 @@ describe('aztec node', () => { tx.data.constants.txContext.chainId = new Fr(1n + chainId.toBigInt()); await tx.recomputeHash(); - expect(await node.isValidTx(tx)).toEqual({ result: 'invalid', reason: [TX_ERROR_INCORRECT_L1_CHAIN_ID] }); + expect(await node.isValidTx(tx)).toEqual({ + result: 'invalid', + reason: [expect.stringContaining(TX_ERROR_INCORRECT_L1_CHAIN_ID)], + }); }); it('tests that the node correctly validates rollup version', async () => { @@ -249,7 +252,10 @@ describe('aztec node', () => { tx.data.constants.txContext.version = new Fr(1n + rollupVersion.toBigInt()); await tx.recomputeHash(); - expect(await node.isValidTx(tx)).toEqual({ result: 'invalid', reason: [TX_ERROR_INCORRECT_ROLLUP_VERSION] }); + expect(await node.isValidTx(tx)).toEqual({ + result: 'invalid', + reason: [expect.stringContaining(TX_ERROR_INCORRECT_ROLLUP_VERSION)], + }); }); it('tests that the node correctly validates oversized transactions', async () => { diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.test.ts b/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.test.ts index 0cb737b7db57..e90aa7c7bed6 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.test.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.test.ts @@ -42,7 +42,10 @@ describe('MetadataTxValidator', () => { }; const expectInvalid = async (tx: Tx, reason: string) => { - await expect(validator.validateTx(tx)).resolves.toEqual({ result: 'invalid', reason: [reason] }); + await expect(validator.validateTx(tx)).resolves.toEqual({ + result: 'invalid', + reason: [expect.stringContaining(reason)], + }); }; const makeTxs = async () 
=> { diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.ts b/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.ts index 62233d3d8c4d..4f15248db82c 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/metadata_validator.ts @@ -28,16 +28,24 @@ export class MetadataTxValidator implements TxValidator { validateTx(tx: T): Promise { const errors = []; if (!this.#hasCorrectL1ChainId(tx)) { - errors.push(TX_ERROR_INCORRECT_L1_CHAIN_ID); + errors.push( + `${TX_ERROR_INCORRECT_L1_CHAIN_ID} (tx: ${tx.data.constants.txContext.chainId.toNumber()}, expected: ${this.values.l1ChainId.toNumber()})`, + ); } if (!this.#hasCorrectRollupVersion(tx)) { - errors.push(TX_ERROR_INCORRECT_ROLLUP_VERSION); + errors.push( + `${TX_ERROR_INCORRECT_ROLLUP_VERSION} (tx: ${tx.data.constants.txContext.version.toNumber()}, expected: ${this.values.rollupVersion.toNumber()})`, + ); } if (!this.#hasCorrectVkTreeRoot(tx)) { - errors.push(TX_ERROR_INCORRECT_VK_TREE_ROOT); + errors.push( + `${TX_ERROR_INCORRECT_VK_TREE_ROOT} (tx: ${tx.data.constants.vkTreeRoot.toString()}, expected: ${this.values.vkTreeRoot.toString()})`, + ); } if (!this.#hasCorrectprotocolContractsHash(tx)) { - errors.push(TX_ERROR_INCORRECT_PROTOCOL_CONTRACTS_HASH); + errors.push( + `${TX_ERROR_INCORRECT_PROTOCOL_CONTRACTS_HASH} (tx: ${tx.data.constants.protocolContractsHash.toString()}, expected: ${this.values.protocolContractsHash.toString()})`, + ); } return Promise.resolve(errors.length > 0 ? 
{ result: 'invalid', reason: errors } : { result: 'valid' }); } From bea8dd7a1817d0d99c045a2e4ff4acf0edf3b752 Mon Sep 17 00:00:00 2001 From: spypsy <6403450+spypsy@users.noreply.github.com> Date: Thu, 5 Mar 2026 11:32:21 +0000 Subject: [PATCH 27/37] feat: single-node implementation of slash-protection signer Fixes [A-477](https://linear.app/aztec-labs/issue/A-477/lightweight-ha-like-signer-to-prevent-duplicate-proposals) - Introduces `LocalSignerWithProtection` which will be used by default if no high-availability config has been provided - Moves around a bunch of things bc `DataStoreConfig` had to be moved into `stdlib` in order to be required by `validator-ha-signer` Co-authored-by: Alex Gherghisan --- yarn-project/archiver/src/factory.ts | 2 +- .../aztec-node/src/aztec-node/config.ts | 2 +- .../aztec-node/src/sentinel/factory.ts | 2 +- .../aztec/src/cli/aztec_start_options.ts | 2 +- .../aztec/src/cli/cmds/start_archiver.ts | 2 +- yarn-project/bot/src/config.ts | 3 +- yarn-project/foundation/src/config/env_var.ts | 1 + yarn-project/kv-store/package.json | 3 +- yarn-project/kv-store/src/indexeddb/index.ts | 2 +- yarn-project/kv-store/src/lmdb-v2/factory.ts | 2 +- yarn-project/kv-store/src/lmdb/index.ts | 2 +- .../node-lib/src/actions/snapshot-sync.ts | 5 +- .../node-lib/src/actions/upload-snapshot.ts | 2 +- .../node-lib/src/factories/l1_tx_utils.ts | 2 +- yarn-project/p2p/src/client/factory.ts | 2 +- .../proposal_tx_collector_worker.ts | 2 +- yarn-project/p2p/src/config.ts | 2 +- .../src/test-helpers/make-test-p2p-clients.ts | 2 +- .../p2p/src/test-helpers/reqresp-nodes.ts | 2 +- .../testbench/p2p_client_testbench_worker.ts | 2 +- yarn-project/p2p/src/util.test.ts | 2 +- yarn-project/p2p/src/util.ts | 2 +- .../src/proving_broker/config.ts | 2 +- .../src/actions/rerun-epoch-proving-job.ts | 2 +- .../src/actions/upload-epoch-proof-failure.ts | 2 +- yarn-project/prover-node/src/config.ts | 2 +- yarn-project/prover-node/src/prover-node.ts | 2 +- 
yarn-project/pxe/src/config/index.ts | 2 +- .../checkpoint_voter.ha.integration.test.ts | 1 + .../slasher/src/factory/create_facade.ts | 2 +- .../src/factory/create_implementation.ts | 2 +- .../slasher/src/slasher_client_facade.ts | 2 +- yarn-project/stdlib/package.json | 3 +- yarn-project/stdlib/src/ha-signing/config.ts | 83 ++-- yarn-project/stdlib/src/ha-signing/index.ts | 9 + .../stdlib/src/ha-signing/local_config.ts | 46 ++ .../src/interfaces/aztec-node-admin.test.ts | 1 + .../stdlib/src/interfaces/validator.ts | 74 ++-- .../src => stdlib/src/kv-store}/config.ts | 2 +- yarn-project/stdlib/src/kv-store/index.ts | 1 + yarn-project/validator-client/src/config.ts | 3 +- .../src/validator.ha.integration.test.ts | 1 + .../src/validator.integration.test.ts | 1 + .../validator-client/src/validator.test.ts | 3 +- .../validator-client/src/validator.ts | 43 +- yarn-project/validator-ha-signer/README.md | 2 - yarn-project/validator-ha-signer/package.json | 4 +- .../validator-ha-signer/src/db/index.ts | 1 + .../validator-ha-signer/src/db/lmdb.test.ts | 417 ++++++++++++++++++ .../validator-ha-signer/src/db/lmdb.ts | 264 +++++++++++ .../validator-ha-signer/src/db/postgres.ts | 24 +- .../validator-ha-signer/src/db/types.ts | 61 ++- .../validator-ha-signer/src/factory.ts | 54 ++- .../src/slashing_protection_service.test.ts | 5 +- .../src/slashing_protection_service.ts | 4 +- yarn-project/validator-ha-signer/src/types.ts | 5 + .../src/validator_ha_signer.test.ts | 27 +- .../src/validator_ha_signer.ts | 9 +- .../validator-ha-signer/tsconfig.json | 3 + .../world-state/src/synchronizer/factory.ts | 2 +- .../world-state/src/test/integration.test.ts | 2 +- yarn-project/yarn.lock | 1 + 62 files changed, 1030 insertions(+), 192 deletions(-) create mode 100644 yarn-project/stdlib/src/ha-signing/local_config.ts rename yarn-project/{kv-store/src => stdlib/src/kv-store}/config.ts (97%) create mode 100644 yarn-project/stdlib/src/kv-store/index.ts create mode 100644 
yarn-project/validator-ha-signer/src/db/lmdb.test.ts create mode 100644 yarn-project/validator-ha-signer/src/db/lmdb.ts diff --git a/yarn-project/archiver/src/factory.ts b/yarn-project/archiver/src/factory.ts index f7f2d46b44db..c0273939a28b 100644 --- a/yarn-project/archiver/src/factory.ts +++ b/yarn-project/archiver/src/factory.ts @@ -7,7 +7,6 @@ import { Buffer32 } from '@aztec/foundation/buffer'; import { merge } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { DateProvider } from '@aztec/foundation/timer'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import { createStore } from '@aztec/kv-store/lmdb-v2'; import { protocolContractNames } from '@aztec/protocol-contracts'; import { BundledProtocolContractsProvider } from '@aztec/protocol-contracts/providers/bundle'; @@ -15,6 +14,7 @@ import { FunctionType, decodeFunctionSignature } from '@aztec/stdlib/abi'; import type { ArchiverEmitter } from '@aztec/stdlib/block'; import { type ContractClassPublic, computePublicBytecodeCommitment } from '@aztec/stdlib/contract'; import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import { getTelemetryClient } from '@aztec/telemetry-client'; import { EventEmitter } from 'events'; diff --git a/yarn-project/aztec-node/src/aztec-node/config.ts b/yarn-project/aztec-node/src/aztec-node/config.ts index 27d433cc6db4..5c74334a94be 100644 --- a/yarn-project/aztec-node/src/aztec-node/config.ts +++ b/yarn-project/aztec-node/src/aztec-node/config.ts @@ -3,7 +3,6 @@ import { type GenesisStateConfig, genesisStateConfigMappings } from '@aztec/ethe import { type L1ContractAddresses, l1ContractAddressesMapping } from '@aztec/ethereum/l1-contract-addresses'; import { type ConfigMappingsType, booleanConfigHelper, getConfigFromMappings } from '@aztec/foundation/config'; import { EthAddress } from '@aztec/foundation/eth-address'; -import { type 
DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config'; import { type KeyStore, type ValidatorKeyStore, @@ -27,6 +26,7 @@ import { slasherConfigMappings } from '@aztec/slasher'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { type NodeRPCConfig, nodeRpcConfigMappings } from '@aztec/stdlib/config'; import type { SlasherConfig } from '@aztec/stdlib/interfaces/server'; +import { type DataStoreConfig, dataConfigMappings } from '@aztec/stdlib/kv-store'; import { type ValidatorClientConfig, validatorClientConfigMappings } from '@aztec/validator-client/config'; import { type WorldStateConfig, worldStateConfigMappings } from '@aztec/world-state/config'; diff --git a/yarn-project/aztec-node/src/sentinel/factory.ts b/yarn-project/aztec-node/src/sentinel/factory.ts index 15f0caf22582..251b9086a627 100644 --- a/yarn-project/aztec-node/src/sentinel/factory.ts +++ b/yarn-project/aztec-node/src/sentinel/factory.ts @@ -1,10 +1,10 @@ import type { EpochCache } from '@aztec/epoch-cache'; import { createLogger } from '@aztec/foundation/log'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import { createStore } from '@aztec/kv-store/lmdb-v2'; import type { P2PClient } from '@aztec/p2p'; import type { L2BlockSource } from '@aztec/stdlib/block'; import type { SlasherConfig } from '@aztec/stdlib/interfaces/server'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import type { SentinelConfig } from './config.js'; import { Sentinel } from './sentinel.js'; diff --git a/yarn-project/aztec/src/cli/aztec_start_options.ts b/yarn-project/aztec/src/cli/aztec_start_options.ts index 863291bf19c1..269609bd1911 100644 --- a/yarn-project/aztec/src/cli/aztec_start_options.ts +++ b/yarn-project/aztec/src/cli/aztec_start_options.ts @@ -12,7 +12,6 @@ import { isBooleanConfigValue, omitConfigMappings, } from '@aztec/foundation/config'; -import { dataConfigMappings } from '@aztec/kv-store/config'; import { sharedNodeConfigMappings } from 
'@aztec/node-lib/config'; import { bootnodeConfigMappings, p2pConfigMappings } from '@aztec/p2p/config'; import { proverAgentConfigMappings, proverBrokerConfigMappings } from '@aztec/prover-client/broker/config'; @@ -20,6 +19,7 @@ import { proverNodeConfigMappings } from '@aztec/prover-node/config'; import { allPxeConfigMappings } from '@aztec/pxe/config'; import { sequencerClientConfigMappings } from '@aztec/sequencer-client/config'; import { chainConfigMappings, nodeRpcConfigMappings } from '@aztec/stdlib/config'; +import { dataConfigMappings } from '@aztec/stdlib/kv-store'; import { telemetryClientConfigMappings } from '@aztec/telemetry-client/config'; import { worldStateConfigMappings } from '@aztec/world-state/config'; diff --git a/yarn-project/aztec/src/cli/cmds/start_archiver.ts b/yarn-project/aztec/src/cli/cmds/start_archiver.ts index d4c95bcd7e6e..2063fadbb6ed 100644 --- a/yarn-project/aztec/src/cli/cmds/start_archiver.ts +++ b/yarn-project/aztec/src/cli/cmds/start_archiver.ts @@ -3,8 +3,8 @@ import { createLogger } from '@aztec/aztec.js/log'; import { type BlobClientConfig, blobClientConfigMapping, createBlobClient } from '@aztec/blob-client/client'; import { getL1Config } from '@aztec/cli/config'; import type { NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; -import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config'; import { ArchiverApiSchema } from '@aztec/stdlib/interfaces/server'; +import { type DataStoreConfig, dataConfigMappings } from '@aztec/stdlib/kv-store'; import { getConfigEnvVars as getTelemetryClientConfig, initTelemetryClient } from '@aztec/telemetry-client'; import { extractRelevantOptions } from '../util.js'; diff --git a/yarn-project/bot/src/config.ts b/yarn-project/bot/src/config.ts index 1e1b104be9b9..3efc3c977348 100644 --- a/yarn-project/bot/src/config.ts +++ b/yarn-project/bot/src/config.ts @@ -11,9 +11,9 @@ import { secretStringConfigHelper, } from '@aztec/foundation/config'; import { Fr } 
from '@aztec/foundation/curves/bn254'; -import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; import { protocolContractsHash } from '@aztec/protocol-contracts'; +import { type DataStoreConfig, dataConfigMappings } from '@aztec/stdlib/kv-store'; import { schemas, zodFor } from '@aztec/stdlib/schemas'; import type { ComponentsVersions } from '@aztec/stdlib/versioning'; @@ -130,7 +130,6 @@ export const BotConfigSchema = zodFor()( l1Mnemonic: undefined, l1PrivateKey: undefined, senderPrivateKey: undefined, - dataDirectory: undefined, dataStoreMapSizeKb: 1_024 * 1_024, ...config, })), diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 942316271146..666bd5a54f19 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -62,6 +62,7 @@ export type EnvVar = | 'BLOB_SINK_MAP_SIZE_KB' | 'P2P_STORE_MAP_SIZE_KB' | 'PROVER_BROKER_STORE_MAP_SIZE_KB' + | 'SIGNING_PROTECTION_MAP_SIZE_KB' | 'WS_DB_MAP_SIZE_KB' | 'ARCHIVE_TREE_MAP_SIZE_KB' | 'NULLIFIER_TREE_MAP_SIZE_KB' diff --git a/yarn-project/kv-store/package.json b/yarn-project/kv-store/package.json index e2edcd04f409..a6738123cb71 100644 --- a/yarn-project/kv-store/package.json +++ b/yarn-project/kv-store/package.json @@ -8,8 +8,7 @@ "./lmdb": "./dest/lmdb/index.js", "./lmdb-v2": "./dest/lmdb-v2/index.js", "./indexeddb": "./dest/indexeddb/index.js", - "./stores": "./dest/stores/index.js", - "./config": "./dest/config.js" + "./stores": "./dest/stores/index.js" }, "scripts": { "build": "yarn clean && ../scripts/tsc.sh", diff --git a/yarn-project/kv-store/src/indexeddb/index.ts b/yarn-project/kv-store/src/indexeddb/index.ts index e913c7d33fcc..e0dd61a961e8 100644 --- a/yarn-project/kv-store/src/indexeddb/index.ts +++ b/yarn-project/kv-store/src/indexeddb/index.ts @@ -1,6 +1,6 @@ import { type Logger, 
createLogger } from '@aztec/foundation/log'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; -import type { DataStoreConfig } from '../config.js'; import { initStoreForRollupAndSchemaVersion } from '../utils.js'; import { AztecIndexedDBStore } from './store.js'; diff --git a/yarn-project/kv-store/src/lmdb-v2/factory.ts b/yarn-project/kv-store/src/lmdb-v2/factory.ts index c61a352b070c..73a8c48295ca 100644 --- a/yarn-project/kv-store/src/lmdb-v2/factory.ts +++ b/yarn-project/kv-store/src/lmdb-v2/factory.ts @@ -1,12 +1,12 @@ import { EthAddress } from '@aztec/foundation/eth-address'; import { type LoggerBindings, createLogger } from '@aztec/foundation/log'; import { DatabaseVersionManager } from '@aztec/stdlib/database-version/manager'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import { mkdir, mkdtemp, rm } from 'fs/promises'; import { tmpdir } from 'os'; import { join } from 'path'; -import type { DataStoreConfig } from '../config.js'; import { AztecLMDBStoreV2 } from './store.js'; const MAX_READERS = 16; diff --git a/yarn-project/kv-store/src/lmdb/index.ts b/yarn-project/kv-store/src/lmdb/index.ts index bcaf1f307d8c..aac0204c4655 100644 --- a/yarn-project/kv-store/src/lmdb/index.ts +++ b/yarn-project/kv-store/src/lmdb/index.ts @@ -1,8 +1,8 @@ import { type Logger, createLogger } from '@aztec/foundation/log'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import { join } from 'path'; -import type { DataStoreConfig } from '../config.js'; import { initStoreForRollupAndSchemaVersion } from '../utils.js'; import { AztecLmdbStore } from './store.js'; diff --git a/yarn-project/node-lib/src/actions/snapshot-sync.ts b/yarn-project/node-lib/src/actions/snapshot-sync.ts index d60760bf6aa1..03d90ca67c51 100644 --- a/yarn-project/node-lib/src/actions/snapshot-sync.ts +++ b/yarn-project/node-lib/src/actions/snapshot-sync.ts @@ -5,11 +5,11 @@ import type { L1ContractsConfig } from '@aztec/ethereum/config'; import type { 
EthAddress } from '@aztec/foundation/eth-address'; import { tryRmDir } from '@aztec/foundation/fs'; import type { Logger } from '@aztec/foundation/log'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import { P2P_STORE_NAME } from '@aztec/p2p'; import type { ChainConfig } from '@aztec/stdlib/config'; import { DatabaseVersionManager } from '@aztec/stdlib/database-version/manager'; import { type ReadOnlyFileStore, createReadOnlyFileStore } from '@aztec/stdlib/file-store'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import { type SnapshotMetadata, type SnapshotsIndexMetadata, @@ -31,7 +31,8 @@ type SnapshotSyncConfig = Pick & Pick & Pick & Pick & - Required & + DataStoreConfig & + Required> & EthereumClientConfig & { snapshotsUrls?: string[]; minL1BlocksToTriggerReplace?: number; diff --git a/yarn-project/node-lib/src/actions/upload-snapshot.ts b/yarn-project/node-lib/src/actions/upload-snapshot.ts index 82b62a350dce..0bb1446b4314 100644 --- a/yarn-project/node-lib/src/actions/upload-snapshot.ts +++ b/yarn-project/node-lib/src/actions/upload-snapshot.ts @@ -1,10 +1,10 @@ import { ARCHIVER_DB_VERSION, type Archiver } from '@aztec/archiver'; import { tryRmDir } from '@aztec/foundation/fs'; import type { Logger } from '@aztec/foundation/log'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import type { ChainConfig } from '@aztec/stdlib/config'; import { createFileStore } from '@aztec/stdlib/file-store'; import type { WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import { uploadSnapshotToIndex } from '@aztec/stdlib/snapshots'; import { WORLD_STATE_DB_VERSION } from '@aztec/world-state'; diff --git a/yarn-project/node-lib/src/factories/l1_tx_utils.ts b/yarn-project/node-lib/src/factories/l1_tx_utils.ts index d3a441c66bb9..4799c3f93e08 100644 --- a/yarn-project/node-lib/src/factories/l1_tx_utils.ts +++ 
b/yarn-project/node-lib/src/factories/l1_tx_utils.ts @@ -8,8 +8,8 @@ import { omit } from '@aztec/foundation/collection'; import type { EthAddress } from '@aztec/foundation/eth-address'; import { createLogger } from '@aztec/foundation/log'; import type { DateProvider } from '@aztec/foundation/timer'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import { createStore } from '@aztec/kv-store/lmdb-v2'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import type { TelemetryClient } from '@aztec/telemetry-client'; import type { L1TxScope } from '../metrics/l1_tx_metrics.js'; diff --git a/yarn-project/p2p/src/client/factory.ts b/yarn-project/p2p/src/client/factory.ts index 0895c1b624cc..9042d48ec9ae 100644 --- a/yarn-project/p2p/src/client/factory.ts +++ b/yarn-project/p2p/src/client/factory.ts @@ -3,12 +3,12 @@ import { BlockNumber } from '@aztec/foundation/branded-types'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { DateProvider } from '@aztec/foundation/timer'; import type { AztecAsyncKVStore } from '@aztec/kv-store'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import { AztecLMDBStoreV2, createStore } from '@aztec/kv-store/lmdb-v2'; import type { L2BlockSource } from '@aztec/stdlib/block'; import type { ChainConfig } from '@aztec/stdlib/config'; import type { ContractDataSource } from '@aztec/stdlib/contract'; import type { AztecNode, ClientProtocolCircuitVerifier, WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client'; import { P2PClient } from '../client/p2p_client.js'; diff --git a/yarn-project/p2p/src/client/test/tx_proposal_collector/proposal_tx_collector_worker.ts b/yarn-project/p2p/src/client/test/tx_proposal_collector/proposal_tx_collector_worker.ts index e1f054b98a02..a76672a1e1de 100644 --- 
a/yarn-project/p2p/src/client/test/tx_proposal_collector/proposal_tx_collector_worker.ts +++ b/yarn-project/p2p/src/client/test/tx_proposal_collector/proposal_tx_collector_worker.ts @@ -3,11 +3,11 @@ import { SecretValue } from '@aztec/foundation/config'; import { createLogger } from '@aztec/foundation/log'; import { sleep } from '@aztec/foundation/sleep'; import { DateProvider, Timer, executeTimeout } from '@aztec/foundation/timer'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; import type { L2BlockSource } from '@aztec/stdlib/block'; import type { ContractDataSource } from '@aztec/stdlib/contract'; import type { ClientProtocolCircuitVerifier } from '@aztec/stdlib/interfaces/server'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import { PeerErrorSeverity } from '@aztec/stdlib/p2p'; import type { Tx, TxValidationResult } from '@aztec/stdlib/tx'; import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client'; diff --git a/yarn-project/p2p/src/config.ts b/yarn-project/p2p/src/config.ts index fc27123df79c..f3dbefe44d4d 100644 --- a/yarn-project/p2p/src/config.ts +++ b/yarn-project/p2p/src/config.ts @@ -11,7 +11,6 @@ import { secretStringConfigHelper, } from '@aztec/foundation/config'; import { Fr } from '@aztec/foundation/curves/bn254'; -import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config'; import { FunctionSelector } from '@aztec/stdlib/abi/function-selector'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { @@ -21,6 +20,7 @@ import { chainConfigMappings, sharedSequencerConfigMappings, } from '@aztec/stdlib/config'; +import { type DataStoreConfig, dataConfigMappings } from '@aztec/stdlib/kv-store'; import { type BatchTxRequesterConfig, diff --git a/yarn-project/p2p/src/test-helpers/make-test-p2p-clients.ts b/yarn-project/p2p/src/test-helpers/make-test-p2p-clients.ts index 1bb554f79b06..3ba6375f5d3c 100644 
--- a/yarn-project/p2p/src/test-helpers/make-test-p2p-clients.ts +++ b/yarn-project/p2p/src/test-helpers/make-test-p2p-clients.ts @@ -4,9 +4,9 @@ import { SecretValue } from '@aztec/foundation/config'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { retryUntil } from '@aztec/foundation/retry'; import { sleep } from '@aztec/foundation/sleep'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; import type { WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import { createP2PClient } from '../client/index.js'; import type { P2PClient } from '../client/p2p_client.js'; diff --git a/yarn-project/p2p/src/test-helpers/reqresp-nodes.ts b/yarn-project/p2p/src/test-helpers/reqresp-nodes.ts index f0e6f04232e2..72f9145ab1fb 100644 --- a/yarn-project/p2p/src/test-helpers/reqresp-nodes.ts +++ b/yarn-project/p2p/src/test-helpers/reqresp-nodes.ts @@ -2,7 +2,6 @@ import type { EpochCache } from '@aztec/epoch-cache'; import { timesParallel } from '@aztec/foundation/collection'; import { SecretValue } from '@aztec/foundation/config'; import { createLogger } from '@aztec/foundation/log'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; import type { L2BlockSource } from '@aztec/stdlib/block'; import { type ChainConfig, emptyChainConfig } from '@aztec/stdlib/config'; @@ -12,6 +11,7 @@ import type { IVCProofVerificationResult, WorldStateSynchronizer, } from '@aztec/stdlib/interfaces/server'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import type { Tx } from '@aztec/stdlib/tx'; import { compressComponentVersions } from '@aztec/stdlib/versioning'; import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client'; diff --git a/yarn-project/p2p/src/testbench/p2p_client_testbench_worker.ts 
b/yarn-project/p2p/src/testbench/p2p_client_testbench_worker.ts index dc2fa88beb89..e8cb8ac7ca46 100644 --- a/yarn-project/p2p/src/testbench/p2p_client_testbench_worker.ts +++ b/yarn-project/p2p/src/testbench/p2p_client_testbench_worker.ts @@ -12,13 +12,13 @@ import { Fr } from '@aztec/foundation/curves/bn254'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { sleep } from '@aztec/foundation/sleep'; import { DateProvider, Timer } from '@aztec/foundation/timer'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; import { protocolContractsHash } from '@aztec/protocol-contracts'; import type { L2BlockSource } from '@aztec/stdlib/block'; import type { ContractDataSource } from '@aztec/stdlib/contract'; import type { ClientProtocolCircuitVerifier, WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import { type BlockProposal, P2PMessage } from '@aztec/stdlib/p2p'; import { ChonkProof } from '@aztec/stdlib/proofs'; import { makeAztecAddress, makeBlockHeader, makeBlockProposal, mockTx } from '@aztec/stdlib/testing'; diff --git a/yarn-project/p2p/src/util.test.ts b/yarn-project/p2p/src/util.test.ts index 0ae0a6c958aa..fec51a67f641 100644 --- a/yarn-project/p2p/src/util.test.ts +++ b/yarn-project/p2p/src/util.test.ts @@ -1,8 +1,8 @@ import { SecretValue } from '@aztec/foundation/config'; import { createLogger } from '@aztec/foundation/log'; import type { AztecAsyncKVStore } from '@aztec/kv-store'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import { generateKeyPair, marshalPrivateKey } from '@libp2p/crypto/keys'; import { createSecp256k1PeerId } from '@libp2p/peer-id-factory'; diff --git 
a/yarn-project/p2p/src/util.ts b/yarn-project/p2p/src/util.ts index ab14817d8b47..37bba2f5f0b9 100644 --- a/yarn-project/p2p/src/util.ts +++ b/yarn-project/p2p/src/util.ts @@ -1,7 +1,7 @@ import { SecretValue } from '@aztec/foundation/config'; import type { Logger } from '@aztec/foundation/log'; import type { AztecAsyncKVStore, AztecAsyncSingleton } from '@aztec/kv-store'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import type { GossipSub } from '@chainsafe/libp2p-gossipsub'; import { generateKeyPair, marshalPrivateKey, unmarshalPrivateKey } from '@libp2p/crypto/keys'; diff --git a/yarn-project/prover-client/src/proving_broker/config.ts b/yarn-project/prover-client/src/proving_broker/config.ts index d8b875858ad8..6a5672117b98 100644 --- a/yarn-project/prover-client/src/proving_broker/config.ts +++ b/yarn-project/prover-client/src/proving_broker/config.ts @@ -6,8 +6,8 @@ import { numberConfigHelper, } from '@aztec/foundation/config'; import { pickConfigMappings } from '@aztec/foundation/config'; -import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config'; import { type ChainConfig, chainConfigMappings } from '@aztec/stdlib/config'; +import { type DataStoreConfig, dataConfigMappings } from '@aztec/stdlib/kv-store'; import { ProvingRequestType } from '@aztec/stdlib/proofs'; import { z } from 'zod'; diff --git a/yarn-project/prover-node/src/actions/rerun-epoch-proving-job.ts b/yarn-project/prover-node/src/actions/rerun-epoch-proving-job.ts index 2108d5a09342..2e619fa93be5 100644 --- a/yarn-project/prover-node/src/actions/rerun-epoch-proving-job.ts +++ b/yarn-project/prover-node/src/actions/rerun-epoch-proving-job.ts @@ -1,10 +1,10 @@ import { createArchiverStore } from '@aztec/archiver'; import type { L1ContractsConfig } from '@aztec/ethereum/config'; import type { Logger } from '@aztec/foundation/log'; -import type { DataStoreConfig } from 
'@aztec/kv-store/config'; import { type ProverClientConfig, createProverClient } from '@aztec/prover-client'; import { ProverBrokerConfig, createAndStartProvingBroker } from '@aztec/prover-client/broker'; import { PublicProcessorFactory } from '@aztec/simulator/server'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import { getTelemetryClient } from '@aztec/telemetry-client'; import { createWorldState } from '@aztec/world-state'; diff --git a/yarn-project/prover-node/src/actions/upload-epoch-proof-failure.ts b/yarn-project/prover-node/src/actions/upload-epoch-proof-failure.ts index 7717ac9cebfb..3643f5d5869c 100644 --- a/yarn-project/prover-node/src/actions/upload-epoch-proof-failure.ts +++ b/yarn-project/prover-node/src/actions/upload-epoch-proof-failure.ts @@ -3,11 +3,11 @@ import { tryRmDir } from '@aztec/foundation/fs'; import { jsonStringify } from '@aztec/foundation/json-rpc'; import type { Logger } from '@aztec/foundation/log'; import { isoDate } from '@aztec/foundation/string'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import { buildSnapshotMetadata, createBackups } from '@aztec/node-lib/actions'; import type { ChainConfig } from '@aztec/stdlib/config'; import { type FileStore, createFileStore } from '@aztec/stdlib/file-store'; import type { WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import { type UploadSnapshotMetadata, getBasePath, uploadSnapshotData } from '@aztec/stdlib/snapshots'; import { WORLD_STATE_DB_VERSION } from '@aztec/world-state'; diff --git a/yarn-project/prover-node/src/config.ts b/yarn-project/prover-node/src/config.ts index 9ba657d77f0a..3a0919845bac 100644 --- a/yarn-project/prover-node/src/config.ts +++ b/yarn-project/prover-node/src/config.ts @@ -6,7 +6,6 @@ import { numberConfigHelper, pickConfigMappings, } from '@aztec/foundation/config'; -import { type DataStoreConfig, dataConfigMappings } from 
'@aztec/kv-store/config'; import { type KeyStoreConfig, keyStoreConfigMappings } from '@aztec/node-keystore/config'; import { ethPrivateKeySchema } from '@aztec/node-keystore/schemas'; import type { KeyStore } from '@aztec/node-keystore/types'; @@ -24,6 +23,7 @@ import { proverPublisherConfigMappings, proverTxSenderConfigMappings, } from '@aztec/sequencer-client/config'; +import { type DataStoreConfig, dataConfigMappings } from '@aztec/stdlib/kv-store'; export type ProverNodeConfig = ProverClientUserConfig & ProverPublisherConfig & diff --git a/yarn-project/prover-node/src/prover-node.ts b/yarn-project/prover-node/src/prover-node.ts index 8adf039d7651..aeb07e70587f 100644 --- a/yarn-project/prover-node/src/prover-node.ts +++ b/yarn-project/prover-node/src/prover-node.ts @@ -7,7 +7,6 @@ import type { Fr } from '@aztec/foundation/curves/bn254'; import { memoize } from '@aztec/foundation/decorators'; import { createLogger } from '@aztec/foundation/log'; import { DateProvider } from '@aztec/foundation/timer'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import { PublicProcessorFactory } from '@aztec/simulator/server'; import type { L2BlockSource } from '@aztec/stdlib/block'; import type { Checkpoint } from '@aztec/stdlib/checkpoint'; @@ -24,6 +23,7 @@ import { type WorldStateSynchronizer, tryStop, } from '@aztec/stdlib/interfaces/server'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; import type { Tx } from '@aztec/stdlib/tx'; import { diff --git a/yarn-project/pxe/src/config/index.ts b/yarn-project/pxe/src/config/index.ts index 43562d65483c..4f7ac29d3b9a 100644 --- a/yarn-project/pxe/src/config/index.ts +++ b/yarn-project/pxe/src/config/index.ts @@ -5,8 +5,8 @@ import { numberConfigHelper, parseBooleanEnv, } from '@aztec/foundation/config'; -import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config'; import { type ChainConfig, 
chainConfigMappings } from '@aztec/stdlib/config'; +import { type DataStoreConfig, dataConfigMappings } from '@aztec/stdlib/kv-store'; export { getPackageInfo } from './package_info.js'; diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts index 254485252ec5..a01c498a8969 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts @@ -257,6 +257,7 @@ describe('CheckpointVoter HA Integration', () => { signingTimeoutMs: 3000, maxStuckDutiesAgeMs: 72000, databaseUrl: 'postgresql://test', + dataStoreMapSizeKb: 128 * 1024 * 1024, }; // Create HA signer with pglite pool diff --git a/yarn-project/slasher/src/factory/create_facade.ts b/yarn-project/slasher/src/factory/create_facade.ts index 6787fc65ce75..0a2010acac01 100644 --- a/yarn-project/slasher/src/factory/create_facade.ts +++ b/yarn-project/slasher/src/factory/create_facade.ts @@ -7,10 +7,10 @@ import { unique } from '@aztec/foundation/collection'; import { EthAddress } from '@aztec/foundation/eth-address'; import { createLogger } from '@aztec/foundation/log'; import { DateProvider } from '@aztec/foundation/timer'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import { createStore } from '@aztec/kv-store/lmdb-v2'; import { getSlotAtTimestamp } from '@aztec/stdlib/epoch-helpers'; import type { SlasherConfig } from '@aztec/stdlib/interfaces/server'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import { SlasherClientFacade } from '../slasher_client_facade.js'; import type { SlasherClientInterface } from '../slasher_client_interface.js'; diff --git a/yarn-project/slasher/src/factory/create_implementation.ts b/yarn-project/slasher/src/factory/create_implementation.ts index 0c6eb8ce6d76..cf68df692af2 100644 --- 
a/yarn-project/slasher/src/factory/create_implementation.ts +++ b/yarn-project/slasher/src/factory/create_implementation.ts @@ -9,9 +9,9 @@ import type { SlotNumber } from '@aztec/foundation/branded-types'; import { EthAddress } from '@aztec/foundation/eth-address'; import { createLogger } from '@aztec/foundation/log'; import { DateProvider } from '@aztec/foundation/timer'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import { AztecLMDBStoreV2 } from '@aztec/kv-store/lmdb-v2'; import type { SlasherConfig } from '@aztec/stdlib/interfaces/server'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import { SlashFactoryContract } from '@aztec/stdlib/l1-contracts'; import { EmpireSlasherClient, type EmpireSlasherSettings } from '../empire_slasher_client.js'; diff --git a/yarn-project/slasher/src/slasher_client_facade.ts b/yarn-project/slasher/src/slasher_client_facade.ts index 0ef4a677ac0a..d07cb7d74e40 100644 --- a/yarn-project/slasher/src/slasher_client_facade.ts +++ b/yarn-project/slasher/src/slasher_client_facade.ts @@ -5,9 +5,9 @@ import type { SlotNumber } from '@aztec/foundation/branded-types'; import { EthAddress } from '@aztec/foundation/eth-address'; import { createLogger } from '@aztec/foundation/log'; import { DateProvider } from '@aztec/foundation/timer'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import { AztecLMDBStoreV2 } from '@aztec/kv-store/lmdb-v2'; import type { SlasherConfig } from '@aztec/stdlib/interfaces/server'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import type { Offense, ProposerSlashAction, SlashPayloadRound } from '@aztec/stdlib/slashing'; import { createSlasherImplementation } from './factory/create_implementation.js'; diff --git a/yarn-project/stdlib/package.json b/yarn-project/stdlib/package.json index ede741f4ca4e..9fcae1f5e6d4 100644 --- a/yarn-project/stdlib/package.json +++ b/yarn-project/stdlib/package.json @@ -63,7 +63,8 @@ "./slashing": 
"./dest/slashing/index.js", "./l1-contracts": "./dest/l1-contracts/index.js", "./world-state": "./dest/world-state/index.js", - "./timetable": "./dest/timetable/index.js" + "./timetable": "./dest/timetable/index.js", + "./kv-store": "./dest/kv-store/index.js" }, "typedocOptions": { "entryPoints": [ diff --git a/yarn-project/stdlib/src/ha-signing/config.ts b/yarn-project/stdlib/src/ha-signing/config.ts index a766fac15e99..69107f687be0 100644 --- a/yarn-project/stdlib/src/ha-signing/config.ts +++ b/yarn-project/stdlib/src/ha-signing/config.ts @@ -13,14 +13,9 @@ import type { ZodFor } from '@aztec/foundation/schemas'; import { z } from 'zod'; /** - * Configuration for the Validator HA Signer - * - * This config is used for distributed locking and slashing protection - * when running multiple validator nodes in a high-availability setup. + * Base signing protection configuration shared by both HA (Postgres) and local (LMDB) signers. */ -export interface ValidatorHASignerConfig { - /** Whether HA signing / slashing protection is enabled */ - haSigningEnabled: boolean; +export interface BaseSignerConfig { /** L1 contract addresses (rollup address required) */ l1Contracts: Pick; /** Unique identifier for this node */ @@ -33,30 +28,9 @@ export interface ValidatorHASignerConfig { maxStuckDutiesAgeMs?: number; /** Optional: clean up old duties after this many hours (disabled if not set) */ cleanupOldDutiesAfterHours?: number; - /** - * PostgreSQL connection string - * Format: postgresql://user:password@host:port/database - */ - databaseUrl?: string; - /** - * PostgreSQL connection pool configuration - */ - /** Maximum number of clients in the pool (default: 10) */ - poolMaxCount?: number; - /** Minimum number of clients in the pool (default: 0) */ - poolMinCount?: number; - /** Idle timeout in milliseconds (default: 10000) */ - poolIdleTimeoutMs?: number; - /** Connection timeout in milliseconds (default: 0, no timeout) */ - poolConnectionTimeoutMs?: number; } -export const 
validatorHASignerConfigMappings: ConfigMappingsType = { - haSigningEnabled: { - env: 'VALIDATOR_HA_SIGNING_ENABLED', - description: 'Whether HA signing / slashing protection is enabled', - ...booleanConfigHelper(false), - }, +export const baseSignerConfigMappings: ConfigMappingsType = { l1Contracts: { description: 'L1 contract addresses (rollup address required)', nested: { @@ -91,6 +65,47 @@ export const validatorHASignerConfigMappings: ConfigMappingsType; + +/** + * Configuration for the Validator HA Signer. + * + * Extends BaseSignerConfig with a flag to enable HA mode and Postgres connection settings. + */ +export interface ValidatorHASignerConfig extends BaseSignerConfig { + /** Whether HA signing / slashing protection is enabled */ + haSigningEnabled: boolean; + /** + * PostgreSQL connection string + * Format: postgresql://user:password@host:port/database + */ + databaseUrl?: string; + /** Maximum number of clients in the pool (default: 10) */ + poolMaxCount?: number; + /** Minimum number of clients in the pool (default: 0) */ + poolMinCount?: number; + /** Idle timeout in milliseconds (default: 10000) */ + poolIdleTimeoutMs?: number; + /** Connection timeout in milliseconds (default: 0, no timeout) */ + poolConnectionTimeoutMs?: number; +} + +export const validatorHASignerConfigMappings: ConfigMappingsType = { + ...baseSignerConfigMappings, + haSigningEnabled: { + env: 'VALIDATOR_HA_SIGNING_ENABLED', + description: 'Whether HA signing / slashing protection is enabled', + ...booleanConfigHelper(false), + }, databaseUrl: { env: 'VALIDATOR_HA_DATABASE_URL', description: @@ -131,16 +146,8 @@ export function getConfigEnvVars(): ValidatorHASignerConfig { return getConfigFromMappings(validatorHASignerConfigMappings); } -export const ValidatorHASignerConfigSchema = z.object({ +export const ValidatorHASignerConfigSchema = BaseSignerConfigSchema.extend({ haSigningEnabled: z.boolean(), - l1Contracts: z.object({ - rollupAddress: z.instanceof(EthAddress), - }), - nodeId: 
z.string(), - pollingIntervalMs: z.number().min(0), - signingTimeoutMs: z.number().min(0), - maxStuckDutiesAgeMs: z.number().min(0).optional(), - cleanupOldDutiesAfterHours: z.number().min(0).optional(), databaseUrl: z.string().optional(), poolMaxCount: z.number().min(0).optional(), poolMinCount: z.number().min(0).optional(), diff --git a/yarn-project/stdlib/src/ha-signing/index.ts b/yarn-project/stdlib/src/ha-signing/index.ts index 7daa9c9ccdd0..4d29ef548ce3 100644 --- a/yarn-project/stdlib/src/ha-signing/index.ts +++ b/yarn-project/stdlib/src/ha-signing/index.ts @@ -1,10 +1,19 @@ export { + type BaseSignerConfig, + BaseSignerConfigSchema, + baseSignerConfigMappings, type ValidatorHASignerConfig, ValidatorHASignerConfigSchema, defaultValidatorHASignerConfig, getConfigEnvVars, validatorHASignerConfigMappings, } from './config.js'; +export { + type LocalSignerConfig, + LocalSignerConfigSchema, + getLocalSignerConfigEnvVars, + localSignerConfigMappings, +} from './local_config.js'; export { DutyType, type BlockProposalSigningContext, diff --git a/yarn-project/stdlib/src/ha-signing/local_config.ts b/yarn-project/stdlib/src/ha-signing/local_config.ts new file mode 100644 index 000000000000..48079af2b137 --- /dev/null +++ b/yarn-project/stdlib/src/ha-signing/local_config.ts @@ -0,0 +1,46 @@ +import { type ConfigMappingsType, getConfigFromMappings } from '@aztec/foundation/config'; +import { zodFor } from '@aztec/foundation/schemas'; +import { type DataStoreConfig, dataConfigMappings } from '@aztec/stdlib/kv-store'; + +import { z } from 'zod'; + +import { type BaseSignerConfig, BaseSignerConfigSchema, baseSignerConfigMappings } from './config.js'; + +/** + * Configuration for local (single-node) slashing protection. + * + * Combines the base signing protection fields (shared with HA mode) with + * DataStoreConfig for the local LMDB backing store, plus a per-store map-size + * override. Used when HA signing is disabled. 
+ */ +export type LocalSignerConfig = BaseSignerConfig & + DataStoreConfig & { + /** Maximum size of the local signing-protection LMDB store in KB. Overwrites the general dataStoreMapSizeKb. */ + signingProtectionMapSizeKb?: number; + }; + +export const localSignerConfigMappings: ConfigMappingsType = { + ...baseSignerConfigMappings, + ...dataConfigMappings, + signingProtectionMapSizeKb: { + env: 'SIGNING_PROTECTION_MAP_SIZE_KB', + description: + 'Maximum size of the local signing-protection LMDB store in KB. Overwrites the general dataStoreMapSizeKb.', + parseEnv: (val: string | undefined) => (val ? +val : undefined), + }, +}; + +export const LocalSignerConfigSchema = zodFor()( + BaseSignerConfigSchema.extend({ + dataDirectory: z.string().optional(), + dataStoreMapSizeKb: z.number(), + signingProtectionMapSizeKb: z.number().optional(), + }), +); + +/** + * Returns the local signer configuration from environment variables. + */ +export function getLocalSignerConfigEnvVars(): LocalSignerConfig { + return getConfigFromMappings(localSignerConfigMappings); +} diff --git a/yarn-project/stdlib/src/interfaces/aztec-node-admin.test.ts b/yarn-project/stdlib/src/interfaces/aztec-node-admin.test.ts index 61014496fe1e..85384db62626 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node-admin.test.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node-admin.test.ts @@ -176,6 +176,7 @@ class MockAztecNodeAdmin implements AztecNodeAdmin { pollingIntervalMs: 50, signingTimeoutMs: 3000, maxStuckDutiesAgeMs: 72000, + dataStoreMapSizeKb: 128 * 1024 * 1024, l1Contracts: { rollupAddress: EthAddress.random(), }, diff --git a/yarn-project/stdlib/src/interfaces/validator.ts b/yarn-project/stdlib/src/interfaces/validator.ts index f0b1c05e2209..040562ce896c 100644 --- a/yarn-project/stdlib/src/interfaces/validator.ts +++ b/yarn-project/stdlib/src/interfaces/validator.ts @@ -20,58 +20,64 @@ import type { PeerId } from '@libp2p/interface'; import { z } from 'zod'; import type { 
CommitteeAttestationsAndSigners } from '../block/index.js'; -import { type ValidatorHASignerConfig, ValidatorHASignerConfigSchema } from '../ha-signing/index.js'; +import { + type LocalSignerConfig, + LocalSignerConfigSchema, + type ValidatorHASignerConfig, + ValidatorHASignerConfigSchema, +} from '../ha-signing/index.js'; import { AllowedElementSchema } from './allowed_element.js'; /** * Validator client configuration */ -export type ValidatorClientConfig = ValidatorHASignerConfig & { - /** The private keys of the validators participating in attestation duties */ - validatorPrivateKeys?: SecretValue<`0x${string}`[]>; +export type ValidatorClientConfig = ValidatorHASignerConfig & + LocalSignerConfig & { + /** The private keys of the validators participating in attestation duties */ + validatorPrivateKeys?: SecretValue<`0x${string}`[]>; - /** The addresses of the validators to use with remote signers */ - validatorAddresses?: EthAddress[]; + /** The addresses of the validators to use with remote signers */ + validatorAddresses?: EthAddress[]; - /** Do not run the validator */ - disableValidator: boolean; + /** Do not run the validator */ + disableValidator: boolean; - /** Temporarily disable these specific validator addresses */ - disabledValidators: EthAddress[]; + /** Temporarily disable these specific validator addresses */ + disabledValidators: EthAddress[]; - /** Interval between polling for new attestations from peers */ - attestationPollingIntervalMs: number; + /** Interval between polling for new attestations from peers */ + attestationPollingIntervalMs: number; - /** Whether to re-execute transactions in a block proposal before attesting */ - validatorReexecute: boolean; + /** Whether to re-execute transactions in a block proposal before attesting */ + validatorReexecute: boolean; - /** Whether to always reexecute block proposals, even for non-validator nodes or when out of the currnet committee */ - alwaysReexecuteBlockProposals?: boolean; + /** Whether to 
always reexecute block proposals, even for non-validator nodes or when out of the current committee */ + alwaysReexecuteBlockProposals?: boolean; - /** Whether to run in fisherman mode: validates all proposals and attestations but does not broadcast attestations or participate in consensus */ - fishermanMode?: boolean; + /** Whether to run in fisherman mode: validates all proposals and attestations but does not broadcast attestations or participate in consensus */ + fishermanMode?: boolean; - /** Skip checkpoint proposal validation and always attest (default: false) */ - skipCheckpointProposalValidation?: boolean; + /** Skip checkpoint proposal validation and always attest (default: false) */ + skipCheckpointProposalValidation?: boolean; - /** Skip pushing re-executed blocks to archiver (default: false) */ - skipPushProposedBlocksToArchiver?: boolean; + /** Skip pushing re-executed blocks to archiver (default: false) */ + skipPushProposedBlocksToArchiver?: boolean; - /** Agree to attest to equivocated checkpoint proposals (for testing purposes only) */ - attestToEquivocatedProposals?: boolean; + /** Agree to attest to equivocated checkpoint proposals (for testing purposes only) */ + attestToEquivocatedProposals?: boolean; - /** Maximum L2 gas per block for validation. Proposals exceeding this limit are rejected. */ - validateMaxL2BlockGas?: number; + /** Maximum L2 gas per block for validation. Proposals exceeding this limit are rejected. */ + validateMaxL2BlockGas?: number; - /** Maximum DA gas per block for validation. Proposals exceeding this limit are rejected. */ - validateMaxDABlockGas?: number; + /** Maximum DA gas per block for validation. Proposals exceeding this limit are rejected. */ + validateMaxDABlockGas?: number; - /** Maximum transactions per block for validation. Proposals exceeding this limit are rejected.
*/ + validateMaxTxsPerBlock?: number; - /** Maximum transactions per checkpoint for validation. Proposals exceeding this limit are rejected. */ - validateMaxTxsPerCheckpoint?: number; -}; + /** Maximum transactions per checkpoint for validation. Proposals exceeding this limit are rejected. */ + validateMaxTxsPerCheckpoint?: number; + }; export type ValidatorClientFullConfig = ValidatorClientConfig & Pick & @@ -87,7 +93,7 @@ export type ValidatorClientFullConfig = ValidatorClientConfig & }; export const ValidatorClientConfigSchema = zodFor>()( - ValidatorHASignerConfigSchema.extend({ + ValidatorHASignerConfigSchema.merge(LocalSignerConfigSchema).extend({ validatorAddresses: z.array(schemas.EthAddress).optional(), disableValidator: z.boolean(), disabledValidators: z.array(schemas.EthAddress), diff --git a/yarn-project/kv-store/src/config.ts b/yarn-project/stdlib/src/kv-store/config.ts similarity index 97% rename from yarn-project/kv-store/src/config.ts rename to yarn-project/stdlib/src/kv-store/config.ts index 0d09f1289fb2..46ef09e6211a 100644 --- a/yarn-project/kv-store/src/config.ts +++ b/yarn-project/stdlib/src/kv-store/config.ts @@ -3,7 +3,7 @@ import { type ConfigMappingsType, getConfigFromMappings, numberConfigHelper } fr import type { EthAddress } from '@aztec/foundation/eth-address'; export type DataStoreConfig = { - dataDirectory: string | undefined; + dataDirectory?: string; dataStoreMapSizeKb: number; l1Contracts?: { rollupAddress: EthAddress }; }; diff --git a/yarn-project/stdlib/src/kv-store/index.ts b/yarn-project/stdlib/src/kv-store/index.ts new file mode 100644 index 000000000000..3b3c5bf0ba45 --- /dev/null +++ b/yarn-project/stdlib/src/kv-store/index.ts @@ -0,0 +1 @@ +export * from './config.js'; diff --git a/yarn-project/validator-client/src/config.ts b/yarn-project/validator-client/src/config.ts index 727a4d4b2648..9b36eae81838 100644 --- a/yarn-project/validator-client/src/config.ts +++ b/yarn-project/validator-client/src/config.ts @@ -6,7 +6,7 @@ 
import { secretValueConfigHelper, } from '@aztec/foundation/config'; import { EthAddress } from '@aztec/foundation/eth-address'; -import { validatorHASignerConfigMappings } from '@aztec/stdlib/ha-signing'; +import { localSignerConfigMappings, validatorHASignerConfigMappings } from '@aztec/stdlib/ha-signing'; import type { ValidatorClientConfig } from '@aztec/stdlib/interfaces/server'; export type { ValidatorClientConfig }; @@ -97,6 +97,7 @@ export const validatorClientConfigMappings: ConfigMappingsType (val ? parseInt(val, 10) : undefined), }, + ...localSignerConfigMappings, ...validatorHASignerConfigMappings, }; diff --git a/yarn-project/validator-client/src/validator.ha.integration.test.ts b/yarn-project/validator-client/src/validator.ha.integration.test.ts index 80c7bd532974..1bf1fbd5229b 100644 --- a/yarn-project/validator-client/src/validator.ha.integration.test.ts +++ b/yarn-project/validator-client/src/validator.ha.integration.test.ts @@ -140,6 +140,7 @@ describe('ValidatorClient HA Integration', () => { signingTimeoutMs: 3000, maxStuckDutiesAgeMs: 72000, databaseUrl: 'postgresql://test', + dataStoreMapSizeKb: 128 * 1024 * 1024, }; // Create 5 validator nodes with unique node IDs diff --git a/yarn-project/validator-client/src/validator.integration.test.ts b/yarn-project/validator-client/src/validator.integration.test.ts index 967f3da9e042..58a2520fbf3b 100644 --- a/yarn-project/validator-client/src/validator.integration.test.ts +++ b/yarn-project/validator-client/src/validator.integration.test.ts @@ -175,6 +175,7 @@ describe('ValidatorClient Integration', () => { haSigningEnabled: false, skipCheckpointProposalValidation: false, skipPushProposedBlocksToArchiver: false, + dataStoreMapSizeKb: 128 * 1024, nodeId: 'test-node', pollingIntervalMs: 100, signingTimeoutMs: 3000, diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index 14799f855c4b..a8ad58ee8b9f 100644 --- 
a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -154,6 +154,7 @@ describe('ValidatorClient', () => { pollingIntervalMs: 1000, signingTimeoutMs: 1000, maxStuckDutiesAgeMs: 72000, + dataStoreMapSizeKb: 1024 * 1024, }; keyStoreManager = new KeystoreManager(makeKeyStore({ attester: validatorPrivateKeys.map(key => key as Hex<32>) })); @@ -1045,7 +1046,7 @@ describe('ValidatorClient', () => { it('should preserve HA signer and wrap new adapter in HAKeyStore after reload', () => { // Simulate HA mode by setting the haSigner and wrapping in HAKeyStore const mockHASigner = { nodeId: 'test-ha-node' }; - (validatorClient as any).haSigner = mockHASigner; + (validatorClient as any).slashingProtectionSigner = mockHASigner; (validatorClient as any).keyStore = haKeyStore; const newCoinbase = EthAddress.random(); diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index f60f2277eac7..baec6d4f2317 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -46,7 +46,7 @@ import type { CheckpointHeader } from '@aztec/stdlib/rollup'; import type { BlockHeader, CheckpointGlobalVariables, Tx } from '@aztec/stdlib/tx'; import { AttestationTimeoutError } from '@aztec/stdlib/validators'; import { type TelemetryClient, type Tracer, getTelemetryClient } from '@aztec/telemetry-client'; -import { createHASigner } from '@aztec/validator-ha-signer/factory'; +import { createHASigner, createLocalSignerWithProtection } from '@aztec/validator-ha-signer/factory'; import { DutyType, type SigningContext } from '@aztec/validator-ha-signer/types'; import type { ValidatorHASigner } from '@aztec/validator-ha-signer/validator-ha-signer'; @@ -109,7 +109,7 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) private l1ToL2MessageSource: L1ToL2MessageSource, private config: ValidatorClientFullConfig, 
private blobClient: BlobClientInterface, - private haSigner: ValidatorHASigner | undefined, + private slashingProtectionSigner: ValidatorHASigner, private dateProvider: DateProvider = new DateProvider(), telemetry: TelemetryClient = getTelemetryClient(), log = createLogger('validator'), @@ -218,18 +218,27 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) ); const nodeKeystoreAdapter = NodeKeystoreAdapter.fromKeyStoreManager(keyStoreManager); - let validatorKeyStore: ExtendedValidatorKeyStore = nodeKeystoreAdapter; - let haSigner: ValidatorHASigner | undefined; + let slashingProtectionSigner: ValidatorHASigner; if (config.haSigningEnabled) { + // Multi-node HA mode: use PostgreSQL-backed distributed locking. // If maxStuckDutiesAgeMs is not explicitly set, compute it from Aztec slot duration const haConfig = { ...config, maxStuckDutiesAgeMs: config.maxStuckDutiesAgeMs ?? epochCache.getL1Constants().slotDuration * 2 * 1000, }; - const { signer } = await createHASigner(haConfig, { telemetryClient: telemetry, dateProvider }); - haSigner = signer; - validatorKeyStore = new HAKeyStore(nodeKeystoreAdapter, signer); + ({ signer: slashingProtectionSigner } = await createHASigner(haConfig, { + telemetryClient: telemetry, + dateProvider, + })); + } else { + // Single-node mode: use LMDB-backed local signing protection. + // This prevents double-signing if the node crashes and restarts mid-proposal. 
+ ({ signer: slashingProtectionSigner } = await createLocalSignerWithProtection(config, { + telemetryClient: telemetry, + dateProvider, + })); } + const validatorKeyStore: ExtendedValidatorKeyStore = new HAKeyStore(nodeKeystoreAdapter, slashingProtectionSigner); const validator = new ValidatorClient( validatorKeyStore, @@ -242,7 +251,7 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) l1ToL2MessageSource, config, blobClient, - haSigner, + slashingProtectionSigner, dateProvider, telemetry, ); @@ -281,24 +290,8 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) } public reloadKeystore(newManager: KeystoreManager): void { - if (this.config.haSigningEnabled && !this.haSigner) { - this.log.warn( - 'HA signing is enabled in config but was not initialized at startup. ' + - 'Restart the node to enable HA signing.', - ); - } else if (!this.config.haSigningEnabled && this.haSigner) { - this.log.warn( - 'HA signing was disabled via config update but the HA signer is still active. 
' + - 'Restart the node to fully disable HA signing.', - ); - } - const newAdapter = NodeKeystoreAdapter.fromKeyStoreManager(newManager); - if (this.haSigner) { - this.keyStore = new HAKeyStore(newAdapter, this.haSigner); - } else { - this.keyStore = newAdapter; - } + this.keyStore = new HAKeyStore(newAdapter, this.slashingProtectionSigner); this.validationService = new ValidationService(this.keyStore, this.log.createChild('validation-service')); } diff --git a/yarn-project/validator-ha-signer/README.md b/yarn-project/validator-ha-signer/README.md index 3969f709e34f..3ff3fafafae5 100644 --- a/yarn-project/validator-ha-signer/README.md +++ b/yarn-project/validator-ha-signer/README.md @@ -36,7 +36,6 @@ import { createHASigner } from '@aztec/validator-ha-signer/factory'; const { signer, db } = await createHASigner({ databaseUrl: process.env.DATABASE_URL, - haSigningEnabled: true, nodeId: 'validator-node-1', pollingIntervalMs: 100, signingTimeoutMs: 3000, @@ -81,7 +80,6 @@ const db = new PostgresSlashingProtectionDatabase(pool); await db.initialize(); const signer = new ValidatorHASigner(db, { - haSigningEnabled: true, nodeId: 'validator-node-1', pollingIntervalMs: 100, signingTimeoutMs: 3000, diff --git a/yarn-project/validator-ha-signer/package.json b/yarn-project/validator-ha-signer/package.json index 0b9a575c21ac..fbbb272e6f9d 100644 --- a/yarn-project/validator-ha-signer/package.json +++ b/yarn-project/validator-ha-signer/package.json @@ -11,7 +11,8 @@ "./slashing-protection-service": "./dest/slashing_protection_service.js", "./types": "./dest/types.js", "./validator-ha-signer": "./dest/validator_ha_signer.js", - "./test": "./dest/test/pglite_pool.js" + "./test": "./dest/test/pglite_pool.js", + "./db/lmdb": "./dest/db/lmdb.js" }, "typedocOptions": { "entryPoints": [ @@ -76,6 +77,7 @@ "dependencies": { "@aztec/ethereum": "workspace:^", "@aztec/foundation": "workspace:^", + "@aztec/kv-store": "workspace:^", "@aztec/stdlib": "workspace:^", "@aztec/telemetry-client": 
"workspace:^", "node-pg-migrate": "^8.0.4", diff --git a/yarn-project/validator-ha-signer/src/db/index.ts b/yarn-project/validator-ha-signer/src/db/index.ts index ea026be796cb..a8cd48407152 100644 --- a/yarn-project/validator-ha-signer/src/db/index.ts +++ b/yarn-project/validator-ha-signer/src/db/index.ts @@ -1,3 +1,4 @@ export * from './types.js'; export * from './schema.js'; export * from './postgres.js'; +export * from './lmdb.js'; diff --git a/yarn-project/validator-ha-signer/src/db/lmdb.test.ts b/yarn-project/validator-ha-signer/src/db/lmdb.test.ts new file mode 100644 index 000000000000..c7ad2a9cb5fe --- /dev/null +++ b/yarn-project/validator-ha-signer/src/db/lmdb.test.ts @@ -0,0 +1,417 @@ +import { BlockNumber, IndexWithinCheckpoint, SlotNumber } from '@aztec/foundation/branded-types'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { TestDateProvider } from '@aztec/foundation/timer'; +import { type AztecLMDBStoreV2, openStoreAt, openTmpStore } from '@aztec/kv-store/lmdb-v2'; + +import { afterEach, beforeEach, describe, expect, it } from '@jest/globals'; +import { mkdir, mkdtemp, rm } from 'fs/promises'; +import { tmpdir } from 'os'; +import { join } from 'path'; + +import { LmdbSlashingProtectionDatabase } from './lmdb.js'; +import { DutyStatus, DutyType } from './types.js'; + +describe('LmdbSlashingProtectionDatabase', () => { + let store: AztecLMDBStoreV2; + let db: LmdbSlashingProtectionDatabase; + let dateProvider: TestDateProvider; + + const ROLLUP_ADDRESS = EthAddress.random(); + const VALIDATOR_ADDRESS = EthAddress.random(); + const SLOT = SlotNumber(100); + const BLOCK_NUMBER = BlockNumber(50); + const BLOCK_INDEX = IndexWithinCheckpoint(0); + const DUTY_TYPE = DutyType.BLOCK_PROPOSAL; + const MESSAGE_HASH = '0xdeadbeef'; + const NODE_ID = 'local'; + const SIGNATURE = '0xsignature'; + + const defaultParams = () => ({ + rollupAddress: ROLLUP_ADDRESS, + validatorAddress: VALIDATOR_ADDRESS, + slot: SLOT, + blockNumber: 
BLOCK_NUMBER, + blockIndexWithinCheckpoint: BLOCK_INDEX, + dutyType: DUTY_TYPE, + messageHash: MESSAGE_HASH, + nodeId: NODE_ID, + }); + + beforeEach(async () => { + store = await openTmpStore('lmdb-slashing-test', true); + dateProvider = new TestDateProvider(); + db = new LmdbSlashingProtectionDatabase(store, dateProvider); + }); + + afterEach(async () => { + await db.close(); + }); + + describe('tryInsertOrGetExisting', () => { + it('should insert a new duty and return isNew=true', async () => { + const result = await db.tryInsertOrGetExisting(defaultParams()); + + expect(result.isNew).toBe(true); + expect(result.record.status).toBe(DutyStatus.SIGNING); + expect(result.record.rollupAddress.equals(ROLLUP_ADDRESS)).toBe(true); + expect(result.record.validatorAddress.equals(VALIDATOR_ADDRESS)).toBe(true); + expect(result.record.slot).toBe(SLOT); + expect(result.record.dutyType).toBe(DUTY_TYPE); + expect(result.record.messageHash).toBe(MESSAGE_HASH); + expect(result.record.nodeId).toBe(NODE_ID); + expect(result.record.lockToken).toBeTruthy(); + }); + + it('should return isNew=false and existing record on duplicate', async () => { + const first = await db.tryInsertOrGetExisting(defaultParams()); + expect(first.isNew).toBe(true); + + const second = await db.tryInsertOrGetExisting({ ...defaultParams(), nodeId: 'other-node' }); + expect(second.isNew).toBe(false); + expect(second.record.nodeId).toBe(NODE_ID); + }); + + it('should strip lockToken from existing record', async () => { + const first = await db.tryInsertOrGetExisting(defaultParams()); + expect(first.isNew).toBe(true); + expect(first.record.lockToken).toBeTruthy(); + + const second = await db.tryInsertOrGetExisting({ ...defaultParams(), nodeId: 'other-node' }); + expect(second.isNew).toBe(false); + expect(second.record.lockToken).toBe(''); + }); + + it('should allow independent duties for different slots, duty types, and validators', async () => { + const [slot1, slot2] = await Promise.all([ + 
db.tryInsertOrGetExisting({ ...defaultParams(), slot: SlotNumber(1) }), + db.tryInsertOrGetExisting({ ...defaultParams(), slot: SlotNumber(2) }), + ]); + expect(slot1.isNew).toBe(true); + expect(slot2.isNew).toBe(true); + + const [proposalResult, attestResult] = await Promise.all([ + db.tryInsertOrGetExisting({ ...defaultParams(), dutyType: DutyType.BLOCK_PROPOSAL }), + db.tryInsertOrGetExisting({ + rollupAddress: ROLLUP_ADDRESS, + validatorAddress: VALIDATOR_ADDRESS, + slot: SLOT, + blockNumber: BLOCK_NUMBER, + dutyType: DutyType.ATTESTATION, + messageHash: MESSAGE_HASH, + nodeId: NODE_ID, + }), + ]); + expect(proposalResult.isNew).toBe(true); + expect(attestResult.isNew).toBe(true); + + const [v1, v2] = await Promise.all([ + db.tryInsertOrGetExisting({ ...defaultParams(), validatorAddress: EthAddress.random() }), + db.tryInsertOrGetExisting({ ...defaultParams(), validatorAddress: EthAddress.random() }), + ]); + expect(v1.isNew).toBe(true); + expect(v2.isNew).toBe(true); + }); + }); + + describe('updateDutySigned', () => { + it('should update duty to signed status with correct lockToken', async () => { + const { record } = await db.tryInsertOrGetExisting(defaultParams()); + + const success = await db.updateDutySigned( + ROLLUP_ADDRESS, + VALIDATOR_ADDRESS, + SLOT, + DUTY_TYPE, + SIGNATURE, + record.lockToken, + BLOCK_INDEX, + ); + + expect(success).toBe(true); + + // Subsequent insert attempt returns existing record with SIGNED status + const second = await db.tryInsertOrGetExisting(defaultParams()); + expect(second.isNew).toBe(false); + expect(second.record.status).toBe(DutyStatus.SIGNED); + expect(second.record.signature).toBe(SIGNATURE); + }); + + it('should return false when lockToken does not match', async () => { + await db.tryInsertOrGetExisting(defaultParams()); + + const success = await db.updateDutySigned( + ROLLUP_ADDRESS, + VALIDATOR_ADDRESS, + SLOT, + DUTY_TYPE, + SIGNATURE, + 'wrong-token', + BLOCK_INDEX, + ); + + expect(success).toBe(false); + }); + 
+ it('should return false when duty does not exist', async () => { + const success = await db.updateDutySigned( + ROLLUP_ADDRESS, + VALIDATOR_ADDRESS, + SLOT, + DUTY_TYPE, + SIGNATURE, + 'any-token', + BLOCK_INDEX, + ); + + expect(success).toBe(false); + }); + }); + + describe('deleteDuty', () => { + it('should delete duty with correct lockToken', async () => { + const { record } = await db.tryInsertOrGetExisting(defaultParams()); + + const success = await db.deleteDuty( + ROLLUP_ADDRESS, + VALIDATOR_ADDRESS, + SLOT, + DUTY_TYPE, + record.lockToken, + BLOCK_INDEX, + ); + + expect(success).toBe(true); + + // Should now be insertable again + const retry = await db.tryInsertOrGetExisting(defaultParams()); + expect(retry.isNew).toBe(true); + }); + + it('should return false when lockToken does not match', async () => { + await db.tryInsertOrGetExisting(defaultParams()); + + const success = await db.deleteDuty( + ROLLUP_ADDRESS, + VALIDATOR_ADDRESS, + SLOT, + DUTY_TYPE, + 'wrong-token', + BLOCK_INDEX, + ); + + expect(success).toBe(false); + }); + + it('should return false when duty does not exist', async () => { + const success = await db.deleteDuty(ROLLUP_ADDRESS, VALIDATOR_ADDRESS, SLOT, DUTY_TYPE, 'any-token', BLOCK_INDEX); + + expect(success).toBe(false); + }); + }); + + describe('cleanupOwnStuckDuties', () => { + it('should remove stuck SIGNING duties older than maxAgeMs', async () => { + await db.tryInsertOrGetExisting(defaultParams()); + + // Advance past the maxAge threshold + dateProvider.advanceTime(120); + const count = await db.cleanupOwnStuckDuties(NODE_ID, 60_000); + expect(count).toBe(1); + + // Should now be insertable again (duty was deleted) + const retry = await db.tryInsertOrGetExisting(defaultParams()); + expect(retry.isNew).toBe(true); + }); + + it('should not remove duties for other node IDs', async () => { + await db.tryInsertOrGetExisting(defaultParams()); + + dateProvider.advanceTime(120); + const count = await 
db.cleanupOwnStuckDuties('different-node', 60_000); + expect(count).toBe(0); + }); + + it('should not remove SIGNED duties', async () => { + const { record } = await db.tryInsertOrGetExisting(defaultParams()); + await db.updateDutySigned( + ROLLUP_ADDRESS, + VALIDATOR_ADDRESS, + SLOT, + DUTY_TYPE, + SIGNATURE, + record.lockToken, + BLOCK_INDEX, + ); + + dateProvider.advanceTime(120); + const count = await db.cleanupOwnStuckDuties(NODE_ID, 60_000); + expect(count).toBe(0); + }); + + it('should not remove fresh SIGNING duties within maxAgeMs', async () => { + await db.tryInsertOrGetExisting(defaultParams()); + + dateProvider.advanceTime(30); + const count = await db.cleanupOwnStuckDuties(NODE_ID, 60_000); + expect(count).toBe(0); + }); + }); + + describe('cleanupOutdatedRollupDuties', () => { + it('is always a no-op: rollup address changes are handled at startup by DatabaseVersionManager', async () => { + await db.tryInsertOrGetExisting(defaultParams()); + + const differentRollup = EthAddress.random(); + const count = await db.cleanupOutdatedRollupDuties(differentRollup); + expect(count).toBe(0); + }); + }); + + describe('cleanupOldDuties', () => { + it('should remove old SIGNED duties', async () => { + const { record } = await db.tryInsertOrGetExisting(defaultParams()); + await db.updateDutySigned( + ROLLUP_ADDRESS, + VALIDATOR_ADDRESS, + SLOT, + DUTY_TYPE, + SIGNATURE, + record.lockToken, + BLOCK_INDEX, + ); + + dateProvider.advanceTime(120); + const count = await db.cleanupOldDuties(60_000); + expect(count).toBe(1); + }); + + it('should not remove SIGNING duties', async () => { + await db.tryInsertOrGetExisting(defaultParams()); + + dateProvider.advanceTime(120); + const count = await db.cleanupOldDuties(60_000); + expect(count).toBe(0); + }); + + it('should not remove fresh SIGNED duties within maxAgeMs', async () => { + const { record } = await db.tryInsertOrGetExisting(defaultParams()); + await db.updateDutySigned( + ROLLUP_ADDRESS, + VALIDATOR_ADDRESS, + SLOT, 
+ DUTY_TYPE, + SIGNATURE, + record.lockToken, + BLOCK_INDEX, + ); + + dateProvider.advanceTime(30); + const count = await db.cleanupOldDuties(60_000); + expect(count).toBe(0); + }); + }); +}); + +/** + * Restart-persistence tests. + * + * These tests verify the core motivation for the local LMDB slashing protection: + * a sequencer that sends a block/checkpoint proposal and then restarts must NOT + * send a duplicate proposal for the same slot. + */ +describe('LmdbSlashingProtectionDatabase - persistence across restarts', () => { + const ROLLUP_ADDRESS = EthAddress.random(); + const VALIDATOR_ADDRESS = EthAddress.random(); + const SLOT = SlotNumber(100); + const BLOCK_NUMBER = BlockNumber(50); + const BLOCK_INDEX = IndexWithinCheckpoint(0); + const DUTY_TYPE = DutyType.BLOCK_PROPOSAL; + const MESSAGE_HASH = '0xdeadbeef'; + const NODE_ID = 'local'; + const SIGNATURE = '0xsignature'; + + const defaultParams = () => ({ + rollupAddress: ROLLUP_ADDRESS, + validatorAddress: VALIDATOR_ADDRESS, + slot: SLOT, + blockNumber: BLOCK_NUMBER, + blockIndexWithinCheckpoint: BLOCK_INDEX, + dutyType: DUTY_TYPE, + messageHash: MESSAGE_HASH, + nodeId: NODE_ID, + }); + + let dataDir: string; + let dateProvider: TestDateProvider; + + beforeEach(async () => { + dataDir = await mkdtemp(join(tmpdir(), 'lmdb-slashing-restart-')); + await mkdir(dataDir, { recursive: true }); + dateProvider = new TestDateProvider(); + }); + + afterEach(async () => { + await rm(dataDir, { recursive: true, force: true, maxRetries: 3, retryDelay: 100 }); + }); + + const openDb = async () => { + const store = await openStoreAt(dataDir); + return { store, db: new LmdbSlashingProtectionDatabase(store, dateProvider) }; + }; + + it('should block duplicate block proposals after a node restart', async () => { + // First run: node signs a block proposal and records it successfully. 
+ const { db: db1 } = await openDb(); + const { record } = await db1.tryInsertOrGetExisting(defaultParams()); + await db1.updateDutySigned( + ROLLUP_ADDRESS, + VALIDATOR_ADDRESS, + SLOT, + DUTY_TYPE, + SIGNATURE, + record.lockToken, + BLOCK_INDEX, + ); + await db1.close(); + + // Restart: reopen the same store. + const { db: db2 } = await openDb(); + try { + const result = await db2.tryInsertOrGetExisting(defaultParams()); + + // The existing SIGNED record is returned; the node must not sign again. + expect(result.isNew).toBe(false); + expect(result.record.status).toBe(DutyStatus.SIGNED); + expect(result.record.signature).toBe(SIGNATURE); + } finally { + await db2.close(); + } + }); + + it('should allow re-signing after crash-cleanup of a stuck SIGNING duty', async () => { + // First run: node starts signing but crashes before completing. The lockToken + // held in memory is lost; the duty is left in SIGNING state on disk. + const { db: db1 } = await openDb(); + await db1.tryInsertOrGetExisting(defaultParams()); + await db1.close(); // crash - lockToken is lost + + // Restart: the stuck SIGNING duty is visible on disk. + dateProvider.advanceTime(120); + const { db: db2 } = await openDb(); + try { + const stuck = await db2.tryInsertOrGetExisting(defaultParams()); + expect(stuck.isNew).toBe(false); + expect(stuck.record.status).toBe(DutyStatus.SIGNING); + + // On startup, the node cleans up its own stuck duties + const cleaned = await db2.cleanupOwnStuckDuties(NODE_ID, 60_000); + expect(cleaned).toBe(1); + + // The duty is gone; the node can now safely re-attempt the signing. 
+ const retry = await db2.tryInsertOrGetExisting(defaultParams()); + expect(retry.isNew).toBe(true); + } finally { + await db2.close(); + } + }); +}); diff --git a/yarn-project/validator-ha-signer/src/db/lmdb.ts b/yarn-project/validator-ha-signer/src/db/lmdb.ts new file mode 100644 index 000000000000..1a8c8c9a8f90 --- /dev/null +++ b/yarn-project/validator-ha-signer/src/db/lmdb.ts @@ -0,0 +1,264 @@ +/** + * LMDB implementation of SlashingProtectionDatabase + * + * Provides local (single-node) double-signing protection using LMDB as the backend. + * Suitable for nodes that do NOT run in a high-availability multi-node setup. + * + * The LMDB store is single-writer, making setIfNotExists inherently atomic. + * This means we get crash-restart protection without needing an external database. + */ +import { SlotNumber } from '@aztec/foundation/branded-types'; +import { randomBytes } from '@aztec/foundation/crypto/random'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { type Logger, createLogger } from '@aztec/foundation/log'; +import type { DateProvider } from '@aztec/foundation/timer'; +import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store'; + +import type { SlashingProtectionDatabase, TryInsertOrGetResult } from '../types.js'; +import { + type CheckAndRecordParams, + DutyStatus, + DutyType, + type StoredDutyRecord, + getBlockIndexFromDutyIdentifier, + recordFromFields, +} from './types.js'; + +function dutyKey( + rollupAddress: string, + validatorAddress: string, + slot: string, + dutyType: string, + blockIndexWithinCheckpoint: number, +): string { + return `${rollupAddress}:${validatorAddress}:${slot}:${dutyType}:${blockIndexWithinCheckpoint}`; +} + +/** + * LMDB-backed implementation of SlashingProtectionDatabase. + * + * Provides single-node double-signing protection that survives crashes and restarts. + * Does not provide cross-node coordination (that requires the PostgreSQL implementation). 
+ */ +export class LmdbSlashingProtectionDatabase implements SlashingProtectionDatabase { + public static readonly SCHEMA_VERSION = 1; + + private readonly duties: AztecAsyncMap; + private readonly log: Logger; + + constructor( + private readonly store: AztecAsyncKVStore, + private readonly dateProvider: DateProvider, + ) { + this.log = createLogger('slashing-protection:lmdb'); + this.duties = store.openMap('signing-protection-duties'); + } + + /** + * Atomically try to insert a new duty record, or get the existing one if present. + * + * LMDB is single-writer so the read-then-write inside transactionAsync is naturally atomic. + */ + public async tryInsertOrGetExisting(params: CheckAndRecordParams): Promise { + const blockIndexWithinCheckpoint = getBlockIndexFromDutyIdentifier(params); + const key = dutyKey( + params.rollupAddress.toString(), + params.validatorAddress.toString(), + params.slot.toString(), + params.dutyType, + blockIndexWithinCheckpoint, + ); + + const lockToken = randomBytes(16).toString('hex'); + const now = this.dateProvider.now(); + + const result = await this.store.transactionAsync(async () => { + const existing = await this.duties.getAsync(key); + if (existing) { + return { isNew: false as const, record: { ...existing, lockToken: '' } }; + } + + const newRecord: StoredDutyRecord = { + rollupAddress: params.rollupAddress.toString(), + validatorAddress: params.validatorAddress.toString(), + slot: params.slot.toString(), + blockNumber: params.blockNumber.toString(), + blockIndexWithinCheckpoint, + dutyType: params.dutyType, + status: DutyStatus.SIGNING, + messageHash: params.messageHash, + nodeId: params.nodeId, + lockToken, + startedAtMs: now, + }; + await this.duties.set(key, newRecord); + return { isNew: true as const, record: newRecord }; + }); + + if (result.isNew) { + this.log.debug(`Acquired lock for duty ${params.dutyType} at slot ${params.slot}`, { + validatorAddress: params.validatorAddress.toString(), + nodeId: params.nodeId, + }); + } 
+ + return { isNew: result.isNew, record: recordFromFields(result.record) }; + } + + /** + * Update a duty to 'signed' status with the signature. + * Only succeeds if the lockToken matches. + */ + public updateDutySigned( + rollupAddress: EthAddress, + validatorAddress: EthAddress, + slot: SlotNumber, + dutyType: DutyType, + signature: string, + lockToken: string, + blockIndexWithinCheckpoint: number, + ): Promise { + const key = dutyKey( + rollupAddress.toString(), + validatorAddress.toString(), + slot.toString(), + dutyType, + blockIndexWithinCheckpoint, + ); + + return this.store.transactionAsync(async () => { + const existing = await this.duties.getAsync(key); + if (!existing) { + this.log.warn('Failed to update duty to signed: duty not found', { + rollupAddress: rollupAddress.toString(), + validatorAddress: validatorAddress.toString(), + slot: slot.toString(), + dutyType, + blockIndexWithinCheckpoint, + }); + return false; + } + + if (existing.lockToken !== lockToken) { + this.log.warn('Failed to update duty to signed: invalid token', { + rollupAddress: rollupAddress.toString(), + validatorAddress: validatorAddress.toString(), + slot: slot.toString(), + dutyType, + blockIndexWithinCheckpoint, + }); + return false; + } + + await this.duties.set(key, { + ...existing, + status: DutyStatus.SIGNED, + signature, + completedAtMs: this.dateProvider.now(), + }); + + return true; + }); + } + + /** + * Delete a duty record. + * Only succeeds if the lockToken matches. 
+ */ + public deleteDuty( + rollupAddress: EthAddress, + validatorAddress: EthAddress, + slot: SlotNumber, + dutyType: DutyType, + lockToken: string, + blockIndexWithinCheckpoint: number, + ): Promise { + const key = dutyKey( + rollupAddress.toString(), + validatorAddress.toString(), + slot.toString(), + dutyType, + blockIndexWithinCheckpoint, + ); + + return this.store.transactionAsync(async () => { + const existing = await this.duties.getAsync(key); + if (!existing || existing.lockToken !== lockToken) { + this.log.warn('Failed to delete duty: invalid token or duty not found', { + rollupAddress: rollupAddress.toString(), + validatorAddress: validatorAddress.toString(), + slot: slot.toString(), + dutyType, + blockIndexWithinCheckpoint, + }); + return false; + } + + await this.duties.delete(key); + return true; + }); + } + + /** + * Cleanup own stuck duties (SIGNING status older than maxAgeMs). + */ + public cleanupOwnStuckDuties(nodeId: string, maxAgeMs: number): Promise { + const cutoffMs = this.dateProvider.now() - maxAgeMs; + + return this.store.transactionAsync(async () => { + const keysToDelete: string[] = []; + for await (const [key, record] of this.duties.entriesAsync()) { + if (record.nodeId === nodeId && record.status === DutyStatus.SIGNING && record.startedAtMs < cutoffMs) { + keysToDelete.push(key); + } + } + for (const key of keysToDelete) { + await this.duties.delete(key); + } + return keysToDelete.length; + }); + } + + /** + * Cleanup duties with outdated rollup address. + * + * This is always a no-op for the LMDB implementation: the underlying store is created via + * DatabaseVersionManager (in factory.ts), which already resets the entire data directory at + * startup whenever the rollup address changes. + */ + public cleanupOutdatedRollupDuties(_currentRollupAddress: EthAddress): Promise { + return Promise.resolve(0); + } + + /** + * Cleanup old signed duties older than maxAgeMs. 
+ */ + public cleanupOldDuties(maxAgeMs: number): Promise { + const cutoffMs = this.dateProvider.now() - maxAgeMs; + + return this.store.transactionAsync(async () => { + const keysToDelete: string[] = []; + for await (const [key, record] of this.duties.entriesAsync()) { + if ( + record.status === DutyStatus.SIGNED && + record.completedAtMs !== undefined && + record.completedAtMs < cutoffMs + ) { + keysToDelete.push(key); + } + } + for (const key of keysToDelete) { + await this.duties.delete(key); + } + return keysToDelete.length; + }); + } + + /** + * Close the underlying LMDB store. + */ + public async close(): Promise { + await this.store.close(); + this.log.debug('LMDB slashing protection database closed'); + } +} diff --git a/yarn-project/validator-ha-signer/src/db/postgres.ts b/yarn-project/validator-ha-signer/src/db/postgres.ts index 8c80f22ddd7b..d0d1c52dfa60 100644 --- a/yarn-project/validator-ha-signer/src/db/postgres.ts +++ b/yarn-project/validator-ha-signer/src/db/postgres.ts @@ -1,7 +1,7 @@ /** * PostgreSQL implementation of SlashingProtectionDatabase */ -import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types'; +import { SlotNumber } from '@aztec/foundation/branded-types'; import { randomBytes } from '@aztec/foundation/crypto/random'; import { EthAddress } from '@aztec/foundation/eth-address'; import { type Logger, createLogger } from '@aztec/foundation/log'; @@ -20,7 +20,7 @@ import { UPDATE_DUTY_SIGNED, } from './schema.js'; import type { CheckAndRecordParams, DutyRow, DutyType, InsertOrGetRow, ValidatorDutyRecord } from './types.js'; -import { getBlockIndexFromDutyIdentifier } from './types.js'; +import { getBlockIndexFromDutyIdentifier, recordFromFields } from './types.js'; /** * Minimal pool interface for database operations. 
@@ -220,14 +220,16 @@ export class PostgresSlashingProtectionDatabase implements SlashingProtectionDat } /** - * Convert a database row to a ValidatorDutyRecord + * Convert a database row to a ValidatorDutyRecord. + * Maps snake_case column names to StoredDutyRecord (camelCase, ms timestamps), + * then delegates to the shared recordFromFields() converter. */ private rowToRecord(row: DutyRow): ValidatorDutyRecord { - return { - rollupAddress: EthAddress.fromString(row.rollup_address), - validatorAddress: EthAddress.fromString(row.validator_address), - slot: SlotNumber.fromString(row.slot), - blockNumber: BlockNumber.fromString(row.block_number), + return recordFromFields({ + rollupAddress: row.rollup_address, + validatorAddress: row.validator_address, + slot: row.slot, + blockNumber: row.block_number, blockIndexWithinCheckpoint: row.block_index_within_checkpoint, dutyType: row.duty_type, status: row.status, @@ -235,10 +237,10 @@ export class PostgresSlashingProtectionDatabase implements SlashingProtectionDat signature: row.signature ?? undefined, nodeId: row.node_id, lockToken: row.lock_token, - startedAt: row.started_at, - completedAt: row.completed_at ?? undefined, + startedAtMs: row.started_at.getTime(), + completedAtMs: row.completed_at?.getTime(), errorMessage: row.error_message ?? 
undefined, - }; + }); } /** diff --git a/yarn-project/validator-ha-signer/src/db/types.ts b/yarn-project/validator-ha-signer/src/db/types.ts index b97ff2de188d..8b0572cab924 100644 --- a/yarn-project/validator-ha-signer/src/db/types.ts +++ b/yarn-project/validator-ha-signer/src/db/types.ts @@ -1,5 +1,10 @@ -import type { BlockNumber, CheckpointNumber, IndexWithinCheckpoint, SlotNumber } from '@aztec/foundation/branded-types'; -import type { EthAddress } from '@aztec/foundation/eth-address'; +import { + BlockNumber, + type CheckpointNumber, + type IndexWithinCheckpoint, + SlotNumber, +} from '@aztec/foundation/branded-types'; +import { EthAddress } from '@aztec/foundation/eth-address'; import type { Signature } from '@aztec/foundation/eth-signature'; import { DutyType } from '@aztec/stdlib/ha-signing'; @@ -23,6 +28,30 @@ export interface DutyRow { error_message: string | null; } +/** + * Plain-primitive representation of a duty record suitable for serialization + * (e.g. msgpackr for LMDB). All domain types are stored as their string/number + * equivalents. Timestamps are Unix milliseconds. + */ +export interface StoredDutyRecord { + rollupAddress: string; + validatorAddress: string; + slot: string; + blockNumber: string; + blockIndexWithinCheckpoint: number; + dutyType: DutyType; + status: DutyStatus; + messageHash: string; + signature?: string; + nodeId: string; + lockToken: string; + /** Unix timestamp in milliseconds when signing started */ + startedAtMs: number; + /** Unix timestamp in milliseconds when signing completed */ + completedAtMs?: number; + errorMessage?: string; +} + /** * Row type from INSERT_OR_GET_DUTY query (includes is_new flag) */ @@ -42,7 +71,8 @@ export enum DutyStatus { export { DutyType }; /** - * Record of a validator duty in the database + * Rich representation of a validator duty, with branded types and Date objects. + * This is the common output type returned by all SlashingProtectionDatabase implementations. 
*/ export interface ValidatorDutyRecord { /** Ethereum address of the rollup contract */ @@ -75,6 +105,31 @@ export interface ValidatorDutyRecord { errorMessage?: string; } +/** + * Convert a {@link StoredDutyRecord} (plain-primitive wire format) to a + * {@link ValidatorDutyRecord} (rich domain type). + * + * Shared by LMDB and any future non-Postgres backend implementations. + */ +export function recordFromFields(stored: StoredDutyRecord): ValidatorDutyRecord { + return { + rollupAddress: EthAddress.fromString(stored.rollupAddress), + validatorAddress: EthAddress.fromString(stored.validatorAddress), + slot: SlotNumber.fromString(stored.slot), + blockNumber: BlockNumber.fromString(stored.blockNumber), + blockIndexWithinCheckpoint: stored.blockIndexWithinCheckpoint, + dutyType: stored.dutyType, + status: stored.status, + messageHash: stored.messageHash, + signature: stored.signature, + nodeId: stored.nodeId, + lockToken: stored.lockToken, + startedAt: new Date(stored.startedAtMs), + completedAt: stored.completedAtMs !== undefined ? new Date(stored.completedAtMs) : undefined, + errorMessage: stored.errorMessage, + }; +} + /** * Duty identifier for block proposals. * blockIndexWithinCheckpoint is REQUIRED and must be >= 0. 
diff --git a/yarn-project/validator-ha-signer/src/factory.ts b/yarn-project/validator-ha-signer/src/factory.ts index db52dacf05ff..3f9e09e69f00 100644 --- a/yarn-project/validator-ha-signer/src/factory.ts +++ b/yarn-project/validator-ha-signer/src/factory.ts @@ -2,14 +2,16 @@ * Factory functions for creating validator HA signers */ import { DateProvider } from '@aztec/foundation/timer'; -import type { ValidatorHASignerConfig } from '@aztec/stdlib/ha-signing'; +import { createStore } from '@aztec/kv-store/lmdb-v2'; +import type { LocalSignerConfig, ValidatorHASignerConfig } from '@aztec/stdlib/ha-signing'; import { getTelemetryClient } from '@aztec/telemetry-client'; import { Pool } from 'pg'; +import { LmdbSlashingProtectionDatabase } from './db/lmdb.js'; import { PostgresSlashingProtectionDatabase } from './db/postgres.js'; import { HASignerMetrics } from './metrics.js'; -import type { CreateHASignerDeps, SlashingProtectionDatabase } from './types.js'; +import type { CreateHASignerDeps, CreateLocalSignerWithProtectionDeps, SlashingProtectionDatabase } from './types.js'; import { ValidatorHASigner } from './validator_ha_signer.js'; /** @@ -27,7 +29,6 @@ import { ValidatorHASigner } from './validator_ha_signer.js'; * ```typescript * const { signer, db } = await createHASigner({ * databaseUrl: process.env.DATABASE_URL, - * haSigningEnabled: true, * nodeId: 'validator-node-1', * pollingIntervalMs: 100, * signingTimeoutMs: 3000, @@ -87,7 +88,52 @@ export async function createHASigner( const metrics = new HASignerMetrics(telemetryClient, signerConfig.nodeId); // Create signer - const signer = new ValidatorHASigner(db, { ...signerConfig, databaseUrl }, { metrics, dateProvider }); + const signer = new ValidatorHASigner(db, signerConfig, { metrics, dateProvider }); + + return { signer, db }; +} + +/** + * Create a local (single-node) signing protection signer backed by LMDB. 
+ * + * This provides double-signing protection for nodes that are NOT running in a + * high-availability (multi-node) setup. It prevents a proposer from sending two + * proposals for the same slot if the node crashes and restarts mid-proposal. + * + * When `config.dataDirectory` is set, the protection database is persisted to disk + * and survives crashes/restarts. When unset, an ephemeral in-memory store is + * used which protects within a single run but not across restarts. + * + * @param config - Local signer config + * @param deps - Optional dependencies (telemetry, date provider). + * @returns An object containing the signer and database instances. + */ +export async function createLocalSignerWithProtection( + config: LocalSignerConfig, + deps?: CreateLocalSignerWithProtectionDeps, +): Promise<{ + signer: ValidatorHASigner; + db: SlashingProtectionDatabase; +}> { + const telemetryClient = deps?.telemetryClient ?? getTelemetryClient(); + const dateProvider = deps?.dateProvider ?? new DateProvider(); + + const kvStore = await createStore('signing-protection', LmdbSlashingProtectionDatabase.SCHEMA_VERSION, { + dataDirectory: config.dataDirectory, + dataStoreMapSizeKb: config.signingProtectionMapSizeKb ?? 
config.dataStoreMapSizeKb, + l1Contracts: config.l1Contracts, + }); + + const db = new LmdbSlashingProtectionDatabase(kvStore, dateProvider); + + const signerConfig = { + ...config, + nodeId: config.nodeId || 'local', + }; + + const metrics = new HASignerMetrics(telemetryClient, signerConfig.nodeId, 'LocalSigningProtectionMetrics'); + + const signer = new ValidatorHASigner(db, signerConfig, { metrics, dateProvider }); return { signer, db }; } diff --git a/yarn-project/validator-ha-signer/src/slashing_protection_service.test.ts b/yarn-project/validator-ha-signer/src/slashing_protection_service.test.ts index 9a776a70ed16..2420ea6bf302 100644 --- a/yarn-project/validator-ha-signer/src/slashing_protection_service.test.ts +++ b/yarn-project/validator-ha-signer/src/slashing_protection_service.test.ts @@ -3,7 +3,7 @@ import { Buffer32 } from '@aztec/foundation/buffer'; import { EthAddress } from '@aztec/foundation/eth-address'; import { sleep } from '@aztec/foundation/sleep'; import { TestDateProvider } from '@aztec/foundation/timer'; -import { DutyType, type ValidatorHASignerConfig } from '@aztec/stdlib/ha-signing'; +import { type BaseSignerConfig, DutyType } from '@aztec/stdlib/ha-signing'; import { getTelemetryClient } from '@aztec/telemetry-client'; import { PGlite } from '@electric-sql/pglite'; @@ -36,7 +36,7 @@ describe('SlashingProtectionService', () => { let pool: Pool; let db: PostgresSlashingProtectionDatabase; let service: SlashingProtectionService; - let config: ValidatorHASignerConfig; + let config: BaseSignerConfig; let dateProvider: TestDateProvider; const telemetryClient = getTelemetryClient(); @@ -51,7 +51,6 @@ describe('SlashingProtectionService', () => { dateProvider = new TestDateProvider(); config = { - haSigningEnabled: true, l1Contracts: { rollupAddress: ROLLUP_ADDRESS }, nodeId: NODE_ID, pollingIntervalMs: 50, diff --git a/yarn-project/validator-ha-signer/src/slashing_protection_service.ts 
b/yarn-project/validator-ha-signer/src/slashing_protection_service.ts index 232276e8738e..ac344c1f379e 100644 --- a/yarn-project/validator-ha-signer/src/slashing_protection_service.ts +++ b/yarn-project/validator-ha-signer/src/slashing_protection_service.ts @@ -8,7 +8,7 @@ import { type Logger, createLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/promise'; import { sleep } from '@aztec/foundation/sleep'; import type { DateProvider } from '@aztec/foundation/timer'; -import type { ValidatorHASignerConfig } from '@aztec/stdlib/ha-signing'; +import type { BaseSignerConfig } from '@aztec/stdlib/ha-signing'; import { type CheckAndRecordParams, @@ -55,7 +55,7 @@ export class SlashingProtectionService { constructor( private readonly db: SlashingProtectionDatabase, - private readonly config: ValidatorHASignerConfig, + private readonly config: BaseSignerConfig, deps: SlashingProtectionServiceDeps, ) { this.log = createLogger('slashing-protection'); diff --git a/yarn-project/validator-ha-signer/src/types.ts b/yarn-project/validator-ha-signer/src/types.ts index 07626bab2c64..58b2cc063031 100644 --- a/yarn-project/validator-ha-signer/src/types.ts +++ b/yarn-project/validator-ha-signer/src/types.ts @@ -70,6 +70,11 @@ export interface CreateHASignerDeps { dateProvider?: DateProvider; } +/** + * deps for creating a local signing protection signer + */ +export type CreateLocalSignerWithProtectionDeps = Omit; + /** * Database interface for slashing protection operations * This abstraction allows for different database implementations (PostgreSQL, SQLite, etc.) 
diff --git a/yarn-project/validator-ha-signer/src/validator_ha_signer.test.ts b/yarn-project/validator-ha-signer/src/validator_ha_signer.test.ts index 17f4f62be56b..d657afcb37cf 100644 --- a/yarn-project/validator-ha-signer/src/validator_ha_signer.test.ts +++ b/yarn-project/validator-ha-signer/src/validator_ha_signer.test.ts @@ -4,7 +4,7 @@ import { EthAddress } from '@aztec/foundation/eth-address'; import type { Signature } from '@aztec/foundation/eth-signature'; import { sleep } from '@aztec/foundation/sleep'; import { TestDateProvider } from '@aztec/foundation/timer'; -import { type ValidatorHASignerConfig, defaultValidatorHASignerConfig } from '@aztec/stdlib/ha-signing'; +import { type BaseSignerConfig, defaultValidatorHASignerConfig } from '@aztec/stdlib/ha-signing'; import { getTelemetryClient } from '@aztec/telemetry-client'; import { PGlite } from '@electric-sql/pglite'; @@ -34,7 +34,7 @@ describe('ValidatorHASigner', () => { let pglite: PGlite; let pool: Pool; let db: PostgresSlashingProtectionDatabase; - let config: ValidatorHASignerConfig; + let config: BaseSignerConfig; let dateProvider: TestDateProvider; const telemetryClient = getTelemetryClient(); @@ -49,13 +49,11 @@ describe('ValidatorHASigner', () => { dateProvider = new TestDateProvider(); config = { - haSigningEnabled: true, l1Contracts: { rollupAddress: EthAddress.random() }, nodeId: NODE_ID, pollingIntervalMs: 50, signingTimeoutMs: 1000, maxStuckDutiesAgeMs: 60_000, - databaseUrl: 'postgresql://user:pass@localhost:5432/testdb', }; }); @@ -75,25 +73,8 @@ describe('ValidatorHASigner', () => { l1Contracts: { rollupAddress: EthAddress.random() }, }; const metrics = new HASignerMetrics(telemetryClient, 'test-node'); - expect( - () => - new ValidatorHASigner( - db, - { - ...defaultConfig, - databaseUrl: 'postgresql://user:pass@localhost:5432/testdb', - haSigningEnabled: true, - }, - { metrics, dateProvider }, - ), - ).toThrow('NODE_ID is required for high-availability setups'); - }); - - it('should 
not initialize when enabled is false', () => { - const disabledConfig = { ...config, haSigningEnabled: false }; - const metrics = new HASignerMetrics(telemetryClient, 'test-node'); - expect(() => new ValidatorHASigner(db, disabledConfig, { metrics, dateProvider })).toThrow( - 'Validator HA Signer is not enabled in config', + expect(() => new ValidatorHASigner(db, defaultConfig, { metrics, dateProvider })).toThrow( + 'NODE_ID is required for high-availability setups', ); }); }); diff --git a/yarn-project/validator-ha-signer/src/validator_ha_signer.ts b/yarn-project/validator-ha-signer/src/validator_ha_signer.ts index 724ce2eb0b63..f16f12c53bc6 100644 --- a/yarn-project/validator-ha-signer/src/validator_ha_signer.ts +++ b/yarn-project/validator-ha-signer/src/validator_ha_signer.ts @@ -11,9 +11,9 @@ import type { Signature } from '@aztec/foundation/eth-signature'; import { type Logger, createLogger } from '@aztec/foundation/log'; import type { DateProvider } from '@aztec/foundation/timer'; import { + type BaseSignerConfig, DutyType, type HAProtectedSigningContext, - type ValidatorHASignerConfig, getBlockNumberFromSigningContext, } from '@aztec/stdlib/ha-signing'; @@ -56,7 +56,7 @@ export class ValidatorHASigner { constructor( db: SlashingProtectionDatabase, - private readonly config: ValidatorHASignerConfig, + private readonly config: BaseSignerConfig, deps: ValidatorHASignerDeps, ) { this.log = createLogger('validator-ha-signer'); @@ -64,11 +64,6 @@ export class ValidatorHASigner { this.metrics = deps.metrics; this.dateProvider = deps.dateProvider; - if (!config.haSigningEnabled) { - // this shouldn't happen, the validator should use different signer for non-HA setups - throw new Error('Validator HA Signer is not enabled in config'); - } - if (!config.nodeId || config.nodeId === '') { throw new Error('NODE_ID is required for high-availability setups'); } diff --git a/yarn-project/validator-ha-signer/tsconfig.json b/yarn-project/validator-ha-signer/tsconfig.json index 
ac3e90b38446..33971f5eff11 100644 --- a/yarn-project/validator-ha-signer/tsconfig.json +++ b/yarn-project/validator-ha-signer/tsconfig.json @@ -12,6 +12,9 @@ { "path": "../foundation" }, + { + "path": "../kv-store" + }, { "path": "../stdlib" }, diff --git a/yarn-project/world-state/src/synchronizer/factory.ts b/yarn-project/world-state/src/synchronizer/factory.ts index 56b18afbeaf4..f6cc55001a4a 100644 --- a/yarn-project/world-state/src/synchronizer/factory.ts +++ b/yarn-project/world-state/src/synchronizer/factory.ts @@ -1,6 +1,6 @@ import type { LoggerBindings } from '@aztec/foundation/log'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import type { L2BlockSource } from '@aztec/stdlib/block'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; import type { PublicDataTreeLeaf } from '@aztec/stdlib/trees'; import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client'; diff --git a/yarn-project/world-state/src/test/integration.test.ts b/yarn-project/world-state/src/test/integration.test.ts index 4f75871da279..8ceb875d4669 100644 --- a/yarn-project/world-state/src/test/integration.test.ts +++ b/yarn-project/world-state/src/test/integration.test.ts @@ -5,8 +5,8 @@ import type { Fr } from '@aztec/foundation/curves/bn254'; import { EthAddress } from '@aztec/foundation/eth-address'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { sleep } from '@aztec/foundation/sleep'; -import type { DataStoreConfig } from '@aztec/kv-store/config'; import type { Checkpoint } from '@aztec/stdlib/checkpoint'; +import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; import { MerkleTreeId } from '@aztec/stdlib/trees'; import { describe, jest } from '@jest/globals'; diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index b94c9a028b37..eac9a7f96775 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -2260,6 
+2260,7 @@ __metadata: dependencies: "@aztec/ethereum": "workspace:^" "@aztec/foundation": "workspace:^" + "@aztec/kv-store": "workspace:^" "@aztec/stdlib": "workspace:^" "@aztec/telemetry-client": "workspace:^" "@electric-sql/pglite": "npm:^0.3.14" From b9b4a215e1ddacf67d69a25ce0d3af4c2e17176b Mon Sep 17 00:00:00 2001 From: Michal Rzeszutko Date: Thu, 5 Mar 2026 13:27:32 +0100 Subject: [PATCH 28/37] feat: Remove non-protocol contracts from public setup allowlist (#21154) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Removes non-protocol contracts (Token class-based entries) from the default public setup allowlist for alpha. Token class IDs change with aztec-nr releases, making the allowlist hard to maintain—and FPC-based fee payment with custom tokens won't be supported on mainnet alpha. - **Removed Token entries from the default allowlist** (`allowed_public_setup.ts`): only protocol contracts (AuthRegistry, FeeJuice) remain in the hardcoded defaults - **Extended `parseAllowList` to support validation flags**: new optional flags segment (`os`, `rn`, `cl=N`) so node operators who manually re-add entries get proper `onlySelf`, `rejectNullMsgSender`, and `calldataLength` validation - **Updated e2e tests to manually extend the allowlist**: `FeesTest` and `ClientFlowsBenchmark` now compute Token allowlist entries and pass them via `txPublicSetupAllowListExtend` - **Updated local network node** (`local-network.ts`): computes Token allowlist entries at startup so FPC-based fee payments continue to work in local development and CI - **Deprecated `PublicFeePaymentMethod` and `PrivateFeePaymentMethod`** in aztec.js with `@deprecated` JSDoc tags - **Added CLI wallet deprecation warnings** for `fpc-public` and `fpc-private` payment methods - **Added warning comment to FPC Noir contract** clarifying it's a reference implementation that won't work on mainnet alpha - **Updated v4 changelog** with the breaking change, new 
flag syntax documentation, and migration guidance ## Test plan - [x] Unit tests: `p2p/src/config.test.ts` (11 tests including 4 new flag parsing tests) - [x] Unit tests: `p2p/src/msg_validators/tx_validator/phases_validator.test.ts` (23 tests) - [x] E2E tests: all 8 fee test suites (26 tests total) — public_payments, private_payments, failures, account_init, gas_estimation, sponsored_payments, fee_juice_payments, fee_settings - [ ] E2E: `e2e_local_network_example.test.ts` (requires running local network — unchanged, validated via local-network.ts code review) - [x] Alert `@AztecProtocol/devrel` to update docs Fixes A-606 --------- Co-authored-by: Santiago Palladino --- .../operators/reference/changelog/v4.md | 28 ++++++++++- .../contracts/fees/fpc_contract/src/main.nr | 4 ++ .../src/fee/private_fee_payment_method.ts | 1 + .../src/fee/public_fee_payment_method.ts | 1 + .../aztec/src/local-network/local-network.ts | 43 +++++++++++++++- .../cli-wallet/src/utils/options/fees.ts | 6 +++ .../client_flows/client_flows_benchmark.ts | 38 +++++++++++++- .../end-to-end/src/e2e_fees/fees_test.ts | 49 ++++++++++++++++++- yarn-project/p2p/src/config.test.ts | 34 +++++++++++++ yarn-project/p2p/src/config.ts | 42 ++++++++++++++-- .../tx_validator/allowed_public_setup.ts | 19 ------- 11 files changed, 239 insertions(+), 26 deletions(-) diff --git a/docs/docs-operate/operators/reference/changelog/v4.md b/docs/docs-operate/operators/reference/changelog/v4.md index 83b29d4758a6..0e30180c1abc 100644 --- a/docs/docs-operate/operators/reference/changelog/v4.md +++ b/docs/docs-operate/operators/reference/changelog/v4.md @@ -125,10 +125,36 @@ The variable fully **replaced** the hardcoded defaults. Format allowed entries w --txPublicSetupAllowListExtend ($TX_PUBLIC_SETUP_ALLOWLIST) ``` -The variable now **extends** the hardcoded defaults (which are always present). Selectors are now mandatory. Format: `I:address:selector,C:classId:selector`. 
+The variable now **extends** the hardcoded defaults (which are always present). Selectors are now mandatory. An optional flags segment can be appended for additional validation: + +``` +I:address:selector[:flags] +C:classId:selector[:flags] +``` + +Where `flags` is a `+`-separated list of: +- `os` — `onlySelf`: only allow calls where msg_sender == contract address +- `rn` — `rejectNullMsgSender`: reject calls with a null msg_sender +- `cl=N` — `calldataLength`: enforce exact calldata length of N fields + +Example: `C:0xabc:0x1234:os+cl=4` **Migration**: If you were using `TX_PUBLIC_SETUP_ALLOWLIST`, ensure all entries include function selectors. Note the variable now adds to defaults rather than replacing them. If you were not setting this variable, no action is needed — the hardcoded defaults now include the correct selectors automatically. +### Token removed from default setup allowlist + +Token class-based entries (`_increase_public_balance` and `transfer_in_public`) have been removed from the default public setup allowlist. FPC-based fee payments using custom tokens no longer work out of the box. + +This change was made because Token class IDs change with aztec-nr releases, making the allowlist impossible to keep up to date with new library releases. In addition, `transfer_in_public` requires complex additional logic to be built into the node to prevent mass transaction invalidation attacks. **FPC-based fee payment with custom tokens won't work on mainnet alpha**. + +**Migration**: Node operators who need FPC support must manually add Token entries via `TX_PUBLIC_SETUP_ALLOWLIST`. Example: + +```bash +TX_PUBLIC_SETUP_ALLOWLIST="C:<classId>:<selector1>:os+cl=3,C:<classId>:<selector2>:cl=5" +``` + +Replace `<classId>` with the deployed Token contract class ID and `<selector1>`/`<selector2>` with the respective function selectors. Keep in mind that this will only work on local network setups, since even if you as an operator add these entries, other nodes will not have them and will not pick up these transactions. 
+ ## Removed features ## New features diff --git a/noir-projects/noir-contracts/contracts/fees/fpc_contract/src/main.nr b/noir-projects/noir-contracts/contracts/fees/fpc_contract/src/main.nr index 62014342f741..c0b209eb6451 100644 --- a/noir-projects/noir-contracts/contracts/fees/fpc_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/fees/fpc_contract/src/main.nr @@ -6,6 +6,10 @@ use aztec::macros::aztec; /// Fee Payment Contract (FPC) allows users to pay for the transaction fee with an arbitrary asset. Supports private /// and public fee payment flows. /// +/// **WARNING**: This is an example/reference implementation. FPC-based fee payment with custom tokens will NOT work +/// on mainnet alpha because Token class IDs change with aztec-nr releases and are not included in the default public +/// setup allowlist. +/// /// ***Note:*** /// Accepted asset funds sent by the users to this contract stay in this contract and later on can /// be pulled by the admin using the `pull_funds` function. diff --git a/yarn-project/aztec.js/src/fee/private_fee_payment_method.ts b/yarn-project/aztec.js/src/fee/private_fee_payment_method.ts index 891c52f79a64..61277579681c 100644 --- a/yarn-project/aztec.js/src/fee/private_fee_payment_method.ts +++ b/yarn-project/aztec.js/src/fee/private_fee_payment_method.ts @@ -10,6 +10,7 @@ import type { FeePaymentMethod } from './fee_payment_method.js'; /** * Holds information about how the fee for a transaction is to be paid. + * @deprecated Is not supported on mainnet. Use {@link FeeJuicePaymentMethodWithClaim} or `SponsoredFeePaymentMethod` instead. 
*/ export class PrivateFeePaymentMethod implements FeePaymentMethod { private assetPromise: Promise | null = null; diff --git a/yarn-project/aztec.js/src/fee/public_fee_payment_method.ts b/yarn-project/aztec.js/src/fee/public_fee_payment_method.ts index 2847a40f1dea..98f20c961241 100644 --- a/yarn-project/aztec.js/src/fee/public_fee_payment_method.ts +++ b/yarn-project/aztec.js/src/fee/public_fee_payment_method.ts @@ -11,6 +11,7 @@ import type { FeePaymentMethod } from './fee_payment_method.js'; /** * Holds information about how the fee for a transaction is to be paid. + * @deprecated Is not supported on mainnet. Use {@link FeeJuicePaymentMethodWithClaim} or `SponsoredFeePaymentMethod` instead. */ export class PublicFeePaymentMethod implements FeePaymentMethod { private assetPromise: Promise | null = null; diff --git a/yarn-project/aztec/src/local-network/local-network.ts b/yarn-project/aztec/src/local-network/local-network.ts index a2d04e8b22ab..4f62ea214738 100644 --- a/yarn-project/aztec/src/local-network/local-network.ts +++ b/yarn-project/aztec/src/local-network/local-network.ts @@ -16,11 +16,15 @@ import { SecretValue } from '@aztec/foundation/config'; import { EthAddress } from '@aztec/foundation/eth-address'; import type { LogFn } from '@aztec/foundation/log'; import { DateProvider, TestDateProvider } from '@aztec/foundation/timer'; +import { TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; import { protocolContractsHash } from '@aztec/protocol-contracts'; import { SequencerState } from '@aztec/sequencer-client'; +import { FunctionSelector, countArgumentsSize } from '@aztec/stdlib/abi'; +import type { FunctionAbi } from '@aztec/stdlib/abi'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import type { ProvingJobBroker } from '@aztec/stdlib/interfaces/server'; +import { getContractClassFromArtifact } from '@aztec/stdlib/contract'; +import type { 
AllowedElement, ProvingJobBroker } from '@aztec/stdlib/interfaces/server'; import type { PublicDataTreeLeaf } from '@aztec/stdlib/trees'; import { type TelemetryClient, @@ -44,6 +48,38 @@ import { getSponsoredFPCAddress } from './sponsored_fpc.js'; const logger = createLogger('local-network'); +/** + * Returns Token-specific allowlist entries for FPC-based fee payments. + * The local network deploys a banana FPC and Token contracts, so the node must allow Token setup functions. + */ +async function getTokenAllowedSetupFunctions(): Promise { + const tokenClassId = (await getContractClassFromArtifact(TokenContractArtifact)).id; + const allFunctions: FunctionAbi[] = (TokenContractArtifact.functions as FunctionAbi[]).concat( + TokenContractArtifact.nonDispatchPublicFunctions || [], + ); + const getCalldataLength = (name: string) => { + const fn = allFunctions.find(f => f.name === name)!; + return 1 + countArgumentsSize(fn); + }; + const increaseBalanceSelector = await FunctionSelector.fromSignature('_increase_public_balance((Field),u128)'); + const transferInPublicSelector = await FunctionSelector.fromSignature( + 'transfer_in_public((Field),(Field),u128,Field)', + ); + return [ + { + classId: tokenClassId, + selector: increaseBalanceSelector, + calldataLength: getCalldataLength('_increase_public_balance'), + onlySelf: true, + }, + { + classId: tokenClassId, + selector: transferInPublicSelector, + calldataLength: getCalldataLength('transfer_in_public'), + }, + ]; +} + const localAnvil = foundry; /** @@ -102,9 +138,14 @@ export async function createLocalNetwork(config: Partial = { logger.warn(`Multiple L1 RPC URLs provided. Local networks will only use the first one: ${l1RpcUrl}`); } + // The local network deploys a banana FPC with Token contracts, so include Token entries + // in the setup allowlist so FPC-based fee payments work out of the box. 
+ const tokenAllowList = await getTokenAllowedSetupFunctions(); + const aztecNodeConfig: AztecNodeConfig = { ...getConfigEnvVars(), ...config, + txPublicSetupAllowListExtend: [...tokenAllowList, ...(config.txPublicSetupAllowListExtend ?? [])], }; const hdAccount = mnemonicToAccount(config.l1Mnemonic || DefaultMnemonic); if ( diff --git a/yarn-project/cli-wallet/src/utils/options/fees.ts b/yarn-project/cli-wallet/src/utils/options/fees.ts index 79e89ff8e60c..908de366f549 100644 --- a/yarn-project/cli-wallet/src/utils/options/fees.ts +++ b/yarn-project/cli-wallet/src/utils/options/fees.ts @@ -171,6 +171,9 @@ export function parsePaymentMethod( case 'fpc-public': { const fpc = getFpc(); const asset = getAsset(); + log( + `WARNING: fpc-public is deprecated and will not work on mainnet alpha. Use fee_juice or fpc-sponsored instead.`, + ); log(`Using public fee payment with asset ${asset} via paymaster ${fpc}`); const { PublicFeePaymentMethod } = await import('@aztec/aztec.js/fee'); return new PublicFeePaymentMethod(fpc, from, wallet, gasSettings); @@ -178,6 +181,9 @@ export function parsePaymentMethod( case 'fpc-private': { const fpc = getFpc(); const asset = getAsset(); + log( + `WARNING: fpc-private is deprecated and will not work on mainnet alpha. 
Use fee_juice or fpc-sponsored instead.`, + ); log(`Using private fee payment with asset ${asset} via paymaster ${fpc}`); const { PrivateFeePaymentMethod } = await import('@aztec/aztec.js/fee'); return new PrivateFeePaymentMethod(fpc, from, wallet, gasSettings); diff --git a/yarn-project/end-to-end/src/bench/client_flows/client_flows_benchmark.ts b/yarn-project/end-to-end/src/bench/client_flows/client_flows_benchmark.ts index 689cfc2dcfef..26e6e78f3dc1 100644 --- a/yarn-project/end-to-end/src/bench/client_flows/client_flows_benchmark.ts +++ b/yarn-project/end-to-end/src/bench/client_flows/client_flows_benchmark.ts @@ -19,12 +19,16 @@ import { AMMContract } from '@aztec/noir-contracts.js/AMM'; import { FPCContract } from '@aztec/noir-contracts.js/FPC'; import { FeeJuiceContract } from '@aztec/noir-contracts.js/FeeJuice'; import { SponsoredFPCContract } from '@aztec/noir-contracts.js/SponsoredFPC'; -import { TokenContract as BananaCoin, TokenContract } from '@aztec/noir-contracts.js/Token'; +import { TokenContract as BananaCoin, TokenContract, TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; import { ProtocolContractAddress } from '@aztec/protocol-contracts'; import { getCanonicalFeeJuice } from '@aztec/protocol-contracts/fee-juice'; import { type PXEConfig, getPXEConfig } from '@aztec/pxe/server'; +import { FunctionSelector, countArgumentsSize } from '@aztec/stdlib/abi'; +import type { FunctionAbi } from '@aztec/stdlib/abi'; +import { getContractClassFromArtifact } from '@aztec/stdlib/contract'; import type { ContractInstanceWithAddress } from '@aztec/stdlib/contract'; import { GasSettings } from '@aztec/stdlib/gas'; +import type { AllowedElement } from '@aztec/stdlib/interfaces/server'; import { deriveSigningKey } from '@aztec/stdlib/keys'; import { MNEMONIC } from '../../fixtures/fixtures.js'; @@ -42,6 +46,35 @@ import { type ClientFlowsConfig, FULL_FLOWS_CONFIG, KEY_FLOWS_CONFIG } from './c const { BENCHMARK_CONFIG } = process.env; +/** Returns 
Token-specific allowlist entries for FPC-based fee payments (test-only). */ +async function getTokenAllowedSetupFunctions(): Promise { + const tokenClassId = (await getContractClassFromArtifact(TokenContractArtifact)).id; + const allFunctions: FunctionAbi[] = (TokenContractArtifact.functions as FunctionAbi[]).concat( + TokenContractArtifact.nonDispatchPublicFunctions || [], + ); + const getCalldataLength = (name: string) => { + const fn = allFunctions.find(f => f.name === name)!; + return 1 + countArgumentsSize(fn); + }; + const increaseBalanceSelector = await FunctionSelector.fromSignature('_increase_public_balance((Field),u128)'); + const transferInPublicSelector = await FunctionSelector.fromSignature( + 'transfer_in_public((Field),(Field),u128,Field)', + ); + return [ + { + classId: tokenClassId, + selector: increaseBalanceSelector, + calldataLength: getCalldataLength('_increase_public_balance'), + onlySelf: true, + }, + { + classId: tokenClassId, + selector: transferInPublicSelector, + calldataLength: getCalldataLength('transfer_in_public'), + }, + ]; +} + export type AccountType = 'ecdsar1' | 'schnorr'; export type FeePaymentMethodGetter = (wallet: Wallet, sender: AztecAddress) => Promise; export type BenchmarkingFeePaymentMethod = 'bridged_fee_juice' | 'private_fpc' | 'sponsored_fpc' | 'fee_juice'; @@ -130,11 +163,14 @@ export class ClientFlowsBenchmark { async setup() { this.logger.info('Setting up subsystems from fresh'); + // Token allowlist entries are test-only: FPC-based fee payment with custom tokens won't work on mainnet alpha. + const tokenAllowList = await getTokenAllowedSetupFunctions(); this.context = await setup(0, { ...this.setupOptions, fundSponsoredFPC: true, skipAccountDeployment: true, l1ContractsArgs: this.setupOptions, + txPublicSetupAllowListExtend: [...(this.setupOptions.txPublicSetupAllowListExtend ?? 
[]), ...tokenAllowList], }); await this.applyBaseSetup(); diff --git a/yarn-project/end-to-end/src/e2e_fees/fees_test.ts b/yarn-project/end-to-end/src/e2e_fees/fees_test.ts index d810c896ffc1..f163d566681d 100644 --- a/yarn-project/end-to-end/src/e2e_fees/fees_test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/fees_test.ts @@ -14,12 +14,16 @@ import { AppSubscriptionContract } from '@aztec/noir-contracts.js/AppSubscriptio import { FPCContract } from '@aztec/noir-contracts.js/FPC'; import { FeeJuiceContract } from '@aztec/noir-contracts.js/FeeJuice'; import { SponsoredFPCContract } from '@aztec/noir-contracts.js/SponsoredFPC'; -import { TokenContract as BananaCoin } from '@aztec/noir-contracts.js/Token'; +import { TokenContract as BananaCoin, TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; import { CounterContract } from '@aztec/noir-test-contracts.js/Counter'; import { ProtocolContractAddress } from '@aztec/protocol-contracts'; import { getCanonicalFeeJuice } from '@aztec/protocol-contracts/fee-juice'; +import { FunctionSelector, countArgumentsSize } from '@aztec/stdlib/abi'; +import type { FunctionAbi } from '@aztec/stdlib/abi'; +import { getContractClassFromArtifact } from '@aztec/stdlib/contract'; import { GasSettings } from '@aztec/stdlib/gas'; import type { AztecNodeAdmin } from '@aztec/stdlib/interfaces/client'; +import type { AllowedElement } from '@aztec/stdlib/interfaces/server'; import { getContract } from 'viem'; @@ -37,6 +41,46 @@ import { type BalancesFn, getBalancesFn, setupSponsoredFPC } from '../fixtures/u import { FeeJuicePortalTestingHarnessFactory, type GasBridgingTestHarness } from '../shared/gas_portal_test_harness.js'; import { TestWallet } from '../test-wallet/test_wallet.js'; +/** Returns the calldata length for a function: 1 (selector) + arguments size. 
*/ +function getCalldataLength(functionName: string): number { + const allFunctions: FunctionAbi[] = (TokenContractArtifact.functions as FunctionAbi[]).concat( + TokenContractArtifact.nonDispatchPublicFunctions || [], + ); + const fn = allFunctions.find(f => f.name === functionName); + if (!fn) { + throw new Error(`Unknown function ${functionName} in Token artifact`); + } + return 1 + countArgumentsSize(fn); +} + +/** + * Returns Token-specific allowlist entries needed for FPC-based fee payments. + * These are test-only — FPC-based fee payment with custom tokens won't work on mainnet alpha. + */ +async function getTokenAllowedSetupFunctions(): Promise { + const tokenClassId = (await getContractClassFromArtifact(TokenContractArtifact)).id; + const increaseBalanceSelector = await FunctionSelector.fromSignature('_increase_public_balance((Field),u128)'); + const transferInPublicSelector = await FunctionSelector.fromSignature( + 'transfer_in_public((Field),(Field),u128,Field)', + ); + + return [ + // Token: needed for private transfers via FPC (transfer_to_public enqueues this) + { + classId: tokenClassId, + selector: increaseBalanceSelector, + calldataLength: getCalldataLength('_increase_public_balance'), + onlySelf: true, + }, + // Token: needed for public transfers via FPC (fee_entrypoint_public enqueues this) + { + classId: tokenClassId, + selector: transferInPublicSelector, + calldataLength: getCalldataLength('transfer_in_public'), + }, + ]; +} + /** * Test fixture for testing fees. Provides the following setup steps: * InitialAccounts: Initializes 3 Schnorr account contracts. @@ -104,12 +148,15 @@ export class FeesTest { async setup() { this.logger.verbose('Setting up fresh context...'); + // Token allowlist entries are test-only: FPC-based fee payment with custom tokens won't work on mainnet alpha. 
+ const tokenAllowList = await getTokenAllowedSetupFunctions(); this.context = await setup(0, { startProverNode: true, ...this.setupOptions, fundSponsoredFPC: true, skipAccountDeployment: true, l1ContractsArgs: { ...this.setupOptions }, + txPublicSetupAllowListExtend: [...(this.setupOptions.txPublicSetupAllowListExtend ?? []), ...tokenAllowList], }); this.rollupContract = RollupContract.getFromConfig(this.context.config); diff --git a/yarn-project/p2p/src/config.test.ts b/yarn-project/p2p/src/config.test.ts index 7c80cedbf670..02185e3322c2 100644 --- a/yarn-project/p2p/src/config.test.ts +++ b/yarn-project/p2p/src/config.test.ts @@ -21,6 +21,40 @@ describe('config', () => { expect(allowList).toEqual(config); }); + it('parses entries with flags', async () => { + const address = await AztecAddress.random(); + const selector = FunctionSelector.random(); + const classId = Fr.random(); + const classSelector = FunctionSelector.random(); + + const allowList = parseAllowList(`I:${address}:${selector}:os+cl=4,C:${classId}:${classSelector}:rn+os+cl=10`); + + expect(allowList).toEqual([ + { address, selector, onlySelf: true, calldataLength: 4 }, + { classId, selector: classSelector, rejectNullMsgSender: true, onlySelf: true, calldataLength: 10 }, + ]); + }); + + it('parses entries without flags (backward compat)', async () => { + const address = await AztecAddress.random(); + const selector = FunctionSelector.random(); + + const allowList = parseAllowList(`I:${address}:${selector}`); + expect(allowList).toEqual([{ address, selector }]); + }); + + it('rejects entry with unknown flag', async () => { + const address = await AztecAddress.random(); + const selector = FunctionSelector.random(); + expect(() => parseAllowList(`I:${address}:${selector}:unknown`)).toThrow('unknown flag'); + }); + + it('rejects entry with invalid calldataLength', async () => { + const address = await AztecAddress.random(); + const selector = FunctionSelector.random(); + expect(() => 
parseAllowList(`I:${address}:${selector}:cl=abc`)).toThrow('invalid calldataLength'); + }); + it('rejects instance entry without selector', async () => { const address = await AztecAddress.random(); expect(() => parseAllowList(`I:${address}`)).toThrow('selector is required'); diff --git a/yarn-project/p2p/src/config.ts b/yarn-project/p2p/src/config.ts index f3dbefe44d4d..9f8af3d582e5 100644 --- a/yarn-project/p2p/src/config.ts +++ b/yarn-project/p2p/src/config.ts @@ -410,9 +410,9 @@ export const p2pConfigMappings: ConfigMappingsType = { env: 'TX_PUBLIC_SETUP_ALLOWLIST', parseEnv: (val: string) => parseAllowList(val), description: - 'Additional entries to extend the default setup allow list. Format: I:address:selector,C:classId:selector', + 'Additional entries to extend the default setup allow list. Format: I:address:selector[:flags],C:classId:selector[:flags]. Flags: os (onlySelf), rn (rejectNullMsgSender), cl=N (calldataLength), joined with +.', printDefault: () => - 'Default: AuthRegistry._set_authorized, FeeJuice._increase_public_balance, Token._increase_public_balance, Token.transfer_in_public', + 'Default: AuthRegistry._set_authorized, AuthRegistry.set_authorized, FeeJuice._increase_public_balance', }, maxPendingTxCount: { env: 'P2P_MAX_PENDING_TX_COUNT', @@ -541,12 +541,45 @@ export const bootnodeConfigMappings = pickConfigMappings( bootnodeConfigKeys, ); +/** + * Parses a `+`-separated flags string into validation properties for an allow list entry. + * Supported flags: `os` (onlySelf), `rn` (rejectNullMsgSender), `cl=N` (calldataLength). 
+ */ +function parseFlags( + flags: string, + entry: string, +): { onlySelf?: boolean; rejectNullMsgSender?: boolean; calldataLength?: number } { + const result: { onlySelf?: boolean; rejectNullMsgSender?: boolean; calldataLength?: number } = {}; + for (const flag of flags.split('+')) { + if (flag === 'os') { + result.onlySelf = true; + } else if (flag === 'rn') { + result.rejectNullMsgSender = true; + } else if (flag.startsWith('cl=')) { + const n = parseInt(flag.slice(3), 10); + if (isNaN(n) || n < 0) { + throw new Error( + `Invalid allow list entry "${entry}": invalid calldataLength in flag "${flag}". Expected a non-negative integer.`, + ); + } + result.calldataLength = n; + } else { + throw new Error(`Invalid allow list entry "${entry}": unknown flag "${flag}". Supported flags: os, rn, cl=N.`); + } + } + return result; +} + /** * Parses a string to a list of allowed elements. * Each entry is expected to be of one of the following formats: * `I:${address}:${selector}` — instance (contract address) with function selector * `C:${classId}:${selector}` — class with function selector * + * An optional flags segment can be appended after the selector: + * `I:${address}:${selector}:${flags}` or `C:${classId}:${selector}:${flags}` + * where flags is a `+`-separated list of: `os` (onlySelf), `rn` (rejectNullMsgSender), `cl=N` (calldataLength). + * * @param value The string to parse * @returns A list of allowed elements */ @@ -562,7 +595,7 @@ export function parseAllowList(value: string): AllowedElement[] { if (!trimmed) { continue; } - const [typeString, identifierString, selectorString] = trimmed.split(':'); + const [typeString, identifierString, selectorString, flagsString] = trimmed.split(':'); if (!selectorString) { throw new Error( @@ -571,16 +604,19 @@ export function parseAllowList(value: string): AllowedElement[] { } const selector = FunctionSelector.fromString(selectorString); + const flags = flagsString ? 
parseFlags(flagsString, trimmed) : {}; if (typeString === 'I') { entries.push({ address: AztecAddress.fromString(identifierString), selector, + ...flags, }); } else if (typeString === 'C') { entries.push({ classId: Fr.fromHexString(identifierString), selector, + ...flags, }); } else { throw new Error( diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/allowed_public_setup.ts b/yarn-project/p2p/src/msg_validators/tx_validator/allowed_public_setup.ts index 6e78567c039c..b8709732a192 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/allowed_public_setup.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/allowed_public_setup.ts @@ -1,10 +1,8 @@ -import { TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; import { ProtocolContractAddress } from '@aztec/protocol-contracts'; import { AuthRegistryArtifact } from '@aztec/protocol-contracts/auth-registry'; import { FeeJuiceArtifact } from '@aztec/protocol-contracts/fee-juice'; import { FunctionSelector, countArgumentsSize } from '@aztec/stdlib/abi'; import type { ContractArtifact, FunctionAbi } from '@aztec/stdlib/abi'; -import { getContractClassFromArtifact } from '@aztec/stdlib/contract'; import type { AllowedElement } from '@aztec/stdlib/interfaces/server'; /** Returns the expected calldata length for a function: 1 (selector) + arguments size. */ @@ -24,13 +22,9 @@ let defaultAllowedSetupFunctions: AllowedElement[] | undefined; /** Returns the default list of functions allowed to run in the setup phase of a transaction. 
*/ export async function getDefaultAllowedSetupFunctions(): Promise { if (defaultAllowedSetupFunctions === undefined) { - const tokenClassId = (await getContractClassFromArtifact(TokenContractArtifact)).id; const setAuthorizedInternalSelector = await FunctionSelector.fromSignature('_set_authorized((Field),Field,bool)'); const setAuthorizedSelector = await FunctionSelector.fromSignature('set_authorized(Field,bool)'); const increaseBalanceSelector = await FunctionSelector.fromSignature('_increase_public_balance((Field),u128)'); - const transferInPublicSelector = await FunctionSelector.fromSignature( - 'transfer_in_public((Field),(Field),u128,Field)', - ); defaultAllowedSetupFunctions = [ // AuthRegistry: needed for authwit support via private path (set_authorized_private enqueues _set_authorized) @@ -55,19 +49,6 @@ export async function getDefaultAllowedSetupFunctions(): Promise Date: Thu, 5 Mar 2026 13:38:03 +0000 Subject: [PATCH 29/37] chore: More updated Alpha configuration (#21155) This PR applies configuration settings for Alpha. 
--- spartan/environments/alpha-net.env | 2 +- spartan/environments/five-tps-long-epoch.env | 2 +- spartan/environments/five-tps-short-epoch.env | 2 +- spartan/environments/network-defaults.yml | 10 +++++----- spartan/environments/next-net.env | 4 ++-- spartan/environments/prove-n-tps-fake.env | 2 +- spartan/environments/prove-n-tps-real.env | 2 +- spartan/environments/staging-public.env | 3 +-- spartan/environments/ten-tps-long-epoch.env | 2 +- spartan/environments/ten-tps-short-epoch.env | 2 +- spartan/environments/testnet.env | 1 + spartan/environments/tps-scenario.env | 2 +- spartan/scripts/deploy_network.sh | 4 +++- spartan/terraform/deploy-aztec-infra/main.tf | 1 + spartan/terraform/deploy-aztec-infra/variables.tf | 6 ++++++ 15 files changed, 27 insertions(+), 18 deletions(-) diff --git a/spartan/environments/alpha-net.env b/spartan/environments/alpha-net.env index fad91bbe53f5..d177ed4baeaa 100644 --- a/spartan/environments/alpha-net.env +++ b/spartan/environments/alpha-net.env @@ -67,7 +67,7 @@ AZTEC_SLASHING_EXECUTION_DELAY_IN_ROUNDS=0 AZTEC_SLASHING_OFFSET_IN_ROUNDS=1 AZTEC_LOCAL_EJECTION_THRESHOLD=90000000000000000000 -SEQ_MAX_TX_PER_BLOCK=10 +SEQ_MAX_TX_PER_CHECKPOINT=72 SEQ_MIN_TX_PER_BLOCK=0 # Override L1 tx utils bump percentages for scenario tests diff --git a/spartan/environments/five-tps-long-epoch.env b/spartan/environments/five-tps-long-epoch.env index a5d3ac217a99..aad096238ac8 100644 --- a/spartan/environments/five-tps-long-epoch.env +++ b/spartan/environments/five-tps-long-epoch.env @@ -54,7 +54,7 @@ AZTEC_SLASHING_EXECUTION_DELAY_IN_ROUNDS=0 AZTEC_SLASHING_OFFSET_IN_ROUNDS=1 AZTEC_LOCAL_EJECTION_THRESHOLD=90000000000000000000 -SEQ_MAX_TX_PER_BLOCK=180 +SEQ_MAX_TX_PER_CHECKPOINT=180 SEQ_MIN_TX_PER_BLOCK=0 # Override L1 tx utils bump percentages for scenario tests diff --git a/spartan/environments/five-tps-short-epoch.env b/spartan/environments/five-tps-short-epoch.env index 6ac77948ca01..c35ed34e45c9 100644 --- 
a/spartan/environments/five-tps-short-epoch.env +++ b/spartan/environments/five-tps-short-epoch.env @@ -54,7 +54,7 @@ AZTEC_SLASHING_EXECUTION_DELAY_IN_ROUNDS=0 AZTEC_SLASHING_OFFSET_IN_ROUNDS=1 AZTEC_LOCAL_EJECTION_THRESHOLD=90000000000000000000 -SEQ_MAX_TX_PER_BLOCK=180 +SEQ_MAX_TX_PER_CHECKPOINT=180 SEQ_MIN_TX_PER_BLOCK=0 # Override L1 tx utils bump percentages for scenario tests diff --git a/spartan/environments/network-defaults.yml b/spartan/environments/network-defaults.yml index 379c79c7c26e..068f1df60df9 100644 --- a/spartan/environments/network-defaults.yml +++ b/spartan/environments/network-defaults.yml @@ -275,11 +275,11 @@ networks: SPONSORED_FPC: true TRANSACTIONS_DISABLED: false # Sequencer - # Gives ~0.1 TPS @ 72s slot time, 36s publish time, 6s block time - max 4 blocks per slot - SEQ_MAX_TX_PER_BLOCK: 2 + SEQ_MAX_TX_PER_CHECKPOINT: 72 # 1 TPS # Prover PROVER_REAL_PROOFS: true # P2P + P2P_MAX_PENDING_TX_COUNT: 1000 P2P_TX_POOL_DELETE_TXS_AFTER_REORG: true # Slasher penalties SLASH_PRUNE_PENALTY: 10e18 @@ -326,16 +326,16 @@ networks: # Genesis state - no test accounts, no sponsored FPC TEST_ACCOUNTS: false SPONSORED_FPC: false - TRANSACTIONS_DISABLED: true # Initially disabled + TRANSACTIONS_DISABLED: false # Sequencer - SEQ_MAX_TX_PER_BLOCK: 0 + SEQ_MAX_TX_PER_CHECKPOINT: 72 # Prover PROVER_REAL_PROOFS: true # Sync SYNC_SNAPSHOTS_URLS: "https://aztec-labs-snapshots.com/mainnet/" BLOB_ALLOW_EMPTY_SOURCES: true # P2P - P2P_MAX_PENDING_TX_COUNT: 0 + P2P_MAX_PENDING_TX_COUNT: 1000 P2P_TX_POOL_DELETE_TXS_AFTER_REORG: true # Telemetry PUBLIC_OTEL_EXPORTER_OTLP_METRICS_ENDPOINT: "" diff --git a/spartan/environments/next-net.env b/spartan/environments/next-net.env index f01c43e4d19a..b27e1903029f 100644 --- a/spartan/environments/next-net.env +++ b/spartan/environments/next-net.env @@ -29,8 +29,8 @@ TEST_ACCOUNTS=true SPONSORED_FPC=true SEQ_MIN_TX_PER_BLOCK=0 -# Gives ~0.1 TPS @ 72s slot time, 36s publish time, 6s block time - max 4 blocks per slot 
-SEQ_MAX_TX_PER_BLOCK=2 + +SEQ_MAX_TX_PER_CHECKPOINT=7 # Build checkpoint even if block is empty. SEQ_BUILD_CHECKPOINT_IF_EMPTY=true diff --git a/spartan/environments/prove-n-tps-fake.env b/spartan/environments/prove-n-tps-fake.env index 2fe45c46745e..4903ff3b0bdd 100644 --- a/spartan/environments/prove-n-tps-fake.env +++ b/spartan/environments/prove-n-tps-fake.env @@ -38,7 +38,7 @@ PUBLISHERS_PER_PROVER=1 PROVER_TEST_DELAY_TYPE=realistic PROVER_TEST_VERIFICATION_DELAY_MS=250 -SEQ_MAX_TX_PER_BLOCK=80 +SEQ_MAX_TX_PER_CHECKPOINT=80 SEQ_MIN_TX_PER_BLOCK=0 P2P_MAX_TX_POOL_SIZE=1000000000 DEBUG_P2P_INSTRUMENT_MESSAGES=true diff --git a/spartan/environments/prove-n-tps-real.env b/spartan/environments/prove-n-tps-real.env index cc6442210832..e76e375845df 100644 --- a/spartan/environments/prove-n-tps-real.env +++ b/spartan/environments/prove-n-tps-real.env @@ -36,7 +36,7 @@ PROVER_PUBLISHER_MNEMONIC_START_INDEX=8000 PROVER_AGENT_POLL_INTERVAL_MS=10000 PUBLISHERS_PER_PROVER=1 -SEQ_MAX_TX_PER_BLOCK=18 +SEQ_MAX_TX_PER_CHECKPOINT=72 SEQ_MIN_TX_PER_BLOCK=0 SEQ_BLOCK_DURATION_MS=6000 SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT=36 diff --git a/spartan/environments/staging-public.env b/spartan/environments/staging-public.env index 40fbc48e4f3b..41a53217da1f 100644 --- a/spartan/environments/staging-public.env +++ b/spartan/environments/staging-public.env @@ -26,8 +26,7 @@ TEST_ACCOUNTS=false SPONSORED_FPC=true SEQ_MIN_TX_PER_BLOCK=0 -# Gives ~0.1 TPS @ 72s slot time, 36s publish time, 6s block time - max 4 blocks per slot -SEQ_MAX_TX_PER_BLOCK=2 +SEQ_MAX_TX_PER_CHECKPOINT=7 # 0.1 TPS # Build checkpoint even if block is empty. 
SEQ_BUILD_CHECKPOINT_IF_EMPTY=true diff --git a/spartan/environments/ten-tps-long-epoch.env b/spartan/environments/ten-tps-long-epoch.env index a91cb0995253..ed85befac3a3 100644 --- a/spartan/environments/ten-tps-long-epoch.env +++ b/spartan/environments/ten-tps-long-epoch.env @@ -54,7 +54,7 @@ AZTEC_SLASHING_EXECUTION_DELAY_IN_ROUNDS=0 AZTEC_SLASHING_OFFSET_IN_ROUNDS=1 AZTEC_LOCAL_EJECTION_THRESHOLD=90000000000000000000 -SEQ_MAX_TX_PER_BLOCK=360 +SEQ_MAX_TX_PER_CHECKPOINT=360 SEQ_MIN_TX_PER_BLOCK=0 # Override L1 tx utils bump percentages for scenario tests diff --git a/spartan/environments/ten-tps-short-epoch.env b/spartan/environments/ten-tps-short-epoch.env index 4114d3b0a928..e475bee370d8 100644 --- a/spartan/environments/ten-tps-short-epoch.env +++ b/spartan/environments/ten-tps-short-epoch.env @@ -54,7 +54,7 @@ AZTEC_SLASHING_EXECUTION_DELAY_IN_ROUNDS=0 AZTEC_SLASHING_OFFSET_IN_ROUNDS=1 AZTEC_LOCAL_EJECTION_THRESHOLD=90000000000000000000 -SEQ_MAX_TX_PER_BLOCK=360 +SEQ_MAX_TX_PER_CHECKPOINT=360 SEQ_MIN_TX_PER_BLOCK=0 # Override L1 tx utils bump percentages for scenario tests diff --git a/spartan/environments/testnet.env b/spartan/environments/testnet.env index f72af734d6a2..eeb37b31e2eb 100644 --- a/spartan/environments/testnet.env +++ b/spartan/environments/testnet.env @@ -64,6 +64,7 @@ BOT_TRANSFERS_FOLLOW_CHAIN=PENDING BOT_SWAPS_REPLICAS=0 P2P_TX_POOL_DELETE_TXS_AFTER_REORG=true +SEQ_MAX_TX_PER_CHECKPOINT=72 DEPLOY_ARCHIVAL_NODE=true diff --git a/spartan/environments/tps-scenario.env b/spartan/environments/tps-scenario.env index 9ebddc63fb33..c99e2e488f91 100644 --- a/spartan/environments/tps-scenario.env +++ b/spartan/environments/tps-scenario.env @@ -66,7 +66,7 @@ AZTEC_SLASHING_EXECUTION_DELAY_IN_ROUNDS=0 AZTEC_SLASHING_OFFSET_IN_ROUNDS=1 AZTEC_LOCAL_EJECTION_THRESHOLD=90000000000000000000 -SEQ_MAX_TX_PER_BLOCK=15 # approx 0.2 TPS +SEQ_MAX_TX_PER_CHECKPOINT=15 # approx 0.2 TPS SEQ_MIN_TX_PER_BLOCK=0 # Override L1 tx utils bump percentages for scenario 
tests diff --git a/spartan/scripts/deploy_network.sh b/spartan/scripts/deploy_network.sh index 4984fbaa2daa..b7d4f5dc28de 100755 --- a/spartan/scripts/deploy_network.sh +++ b/spartan/scripts/deploy_network.sh @@ -105,7 +105,8 @@ fi PROVER_FAILED_PROOF_STORE=${PROVER_FAILED_PROOF_STORE:-} SEQ_MIN_TX_PER_BLOCK=${SEQ_MIN_TX_PER_BLOCK:-0} -SEQ_MAX_TX_PER_BLOCK=${SEQ_MAX_TX_PER_BLOCK:-8} +SEQ_MAX_TX_PER_BLOCK=${SEQ_MAX_TX_PER_BLOCK:-null} +SEQ_MAX_TX_PER_CHECKPOINT=${SEQ_MAX_TX_PER_CHECKPOINT:-8} SEQ_BLOCK_DURATION_MS=${SEQ_BLOCK_DURATION_MS:-} SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT=${SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT:-} SEQ_BUILD_CHECKPOINT_IF_EMPTY=${SEQ_BUILD_CHECKPOINT_IF_EMPTY:-} @@ -530,6 +531,7 @@ VALIDATOR_PUBLISHERS_PER_REPLICA = ${VALIDATOR_PUBLISHERS_PER_REPLICA} VALIDATOR_HA_REPLICAS = ${VALIDATOR_HA_REPLICAS} SEQ_MIN_TX_PER_BLOCK = ${SEQ_MIN_TX_PER_BLOCK} SEQ_MAX_TX_PER_BLOCK = ${SEQ_MAX_TX_PER_BLOCK} +SEQ_MAX_TX_PER_CHECKPOINT = ${SEQ_MAX_TX_PER_CHECKPOINT} SEQ_BLOCK_DURATION_MS = ${SEQ_BLOCK_DURATION_MS:-null} SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT = ${SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT:-null} SEQ_BUILD_CHECKPOINT_IF_EMPTY = ${SEQ_BUILD_CHECKPOINT_IF_EMPTY:-null} diff --git a/spartan/terraform/deploy-aztec-infra/main.tf b/spartan/terraform/deploy-aztec-infra/main.tf index fc46ee5e3b94..6b039f742b37 100644 --- a/spartan/terraform/deploy-aztec-infra/main.tf +++ b/spartan/terraform/deploy-aztec-infra/main.tf @@ -212,6 +212,7 @@ locals { "validator.node.proverRealProofs" = var.PROVER_REAL_PROOFS "validator.node.env.SEQ_MIN_TX_PER_BLOCK" = var.SEQ_MIN_TX_PER_BLOCK "validator.node.env.SEQ_MAX_TX_PER_BLOCK" = var.SEQ_MAX_TX_PER_BLOCK + "validator.node.env.SEQ_MAX_TX_PER_CHECKPOINT" = var.SEQ_MAX_TX_PER_CHECKPOINT "validator.node.env.SEQ_BLOCK_DURATION_MS" = var.SEQ_BLOCK_DURATION_MS "validator.node.env.SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT" = var.SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT "validator.node.env.SEQ_BUILD_CHECKPOINT_IF_EMPTY" = 
var.SEQ_BUILD_CHECKPOINT_IF_EMPTY diff --git a/spartan/terraform/deploy-aztec-infra/variables.tf b/spartan/terraform/deploy-aztec-infra/variables.tf index 28caede2a671..2e9bbdb09151 100644 --- a/spartan/terraform/deploy-aztec-infra/variables.tf +++ b/spartan/terraform/deploy-aztec-infra/variables.tf @@ -349,6 +349,12 @@ variable "SEQ_MAX_TX_PER_BLOCK" { default = "8" } +variable "SEQ_MAX_TX_PER_CHECKPOINT" { + description = "Maximum number of sequencer transactions per checkpoint" + type = string + default = null +} + variable "SEQ_ENFORCE_TIME_TABLE" { description = "Whether to enforce the time table when building blocks" type = string From b0d126b5ec98270b2e5909983b30b4c47085bcbe Mon Sep 17 00:00:00 2001 From: spypsy Date: Thu, 5 Mar 2026 13:40:08 +0000 Subject: [PATCH 30/37] chore: tally slashing pruning improvements (#21161) Fixes [A-607](https://linear.app/aztec-labs/issue/A-607/tally-slasher-prune-improvements) Follow-up to #20683 --- yarn-project/slasher/README.md | 2 +- .../slasher/src/tally_slasher_client.ts | 9 +-- .../stdlib/src/slashing/tally.test.ts | 68 +++++++++++++------ yarn-project/stdlib/src/slashing/tally.ts | 26 +++++-- 4 files changed, 72 insertions(+), 33 deletions(-) diff --git a/yarn-project/slasher/README.md b/yarn-project/slasher/README.md index b5270720fa9a..adb32be23305 100644 --- a/yarn-project/slasher/README.md +++ b/yarn-project/slasher/README.md @@ -185,7 +185,7 @@ These settings are configured locally on each validator node: - `slashProposeInvalidAttestationsPenalty`: Penalty for PROPOSED_INSUFFICIENT_ATTESTATIONS and PROPOSED_INCORRECT_ATTESTATIONS - `slashAttestDescendantOfInvalidPenalty`: Penalty for ATTESTED_DESCENDANT_OF_INVALID - `slashUnknownPenalty`: Default penalty for unknown offense types -- `slashMaxPayloadSize`: Maximum size of slash payloads. 
In the empire model this limits offenses per payload; in the tally model it limits offenses considered when building the vote for a round (same prioritization: uncontroversial first, then by amount and age), so that execution payload stays within gas limits. +- `slashMaxPayloadSize`: Maximum size of slash payloads. In the empire model this limits offenses per payload. In the tally model it limits the number of **unique validators** (across all committees and epochs in a round) that receive non-zero votes. When this cap is hit, the lowest-severity validator-epoch pairs are zeroed out first, so the most severe slashes are always preserved. Note that multiple offenses for the same validator in the same epoch are summed and counted as a single validator entry against this limit. - `slashMinPenaltyPercentage`: Agree to slashes if they are at least this percentage of the configured penalty (empire model) - `slashMaxPenaltyPercentage`: Agree to slashes if they are at most this percentage of the configured penalty (empire model) diff --git a/yarn-project/slasher/src/tally_slasher_client.ts b/yarn-project/slasher/src/tally_slasher_client.ts index d226775b7b92..862525addf63 100644 --- a/yarn-project/slasher/src/tally_slasher_client.ts +++ b/yarn-project/slasher/src/tally_slasher_client.ts @@ -362,18 +362,13 @@ export class TallySlasherClient implements ProposerSlashActionProvider, SlasherC const committees = await this.collectCommitteesActiveDuringRound(slashedRound); const epochsForCommittees = getEpochsForRound(slashedRound, this.settings); const { slashMaxPayloadSize } = this.config; - const { votes, truncatedCount } = getSlashConsensusVotesFromOffenses( + const votes = getSlashConsensusVotesFromOffenses( offensesToSlash, committees, epochsForCommittees.map(e => BigInt(e)), { ...this.settings, maxSlashedValidators: slashMaxPayloadSize }, + this.log, ); - if (truncatedCount > 0) { - this.log.warn( - `Vote truncated: ${truncatedCount} validator-epoch pairs dropped to stay 
within gas limit of ${slashMaxPayloadSize}`, - { slotNumber, currentRound, slashedRound }, - ); - } if (votes.every(v => v === 0)) { this.log.warn(`Computed votes for offenses are all zero. Skipping vote.`, { slotNumber, diff --git a/yarn-project/stdlib/src/slashing/tally.test.ts b/yarn-project/stdlib/src/slashing/tally.test.ts index 4c0fafc58087..b0682890d6a5 100644 --- a/yarn-project/stdlib/src/slashing/tally.test.ts +++ b/yarn-project/stdlib/src/slashing/tally.test.ts @@ -41,7 +41,7 @@ describe('TallySlashingHelpers', () => { const committees = [[mockValidator1, mockValidator2, mockValidator3]]; const epochsForCommittees = [5n]; // Committee for epoch 5 - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); expect(votes).toHaveLength(4); // Padded to targetCommitteeSize expect(votes[0]).toEqual(2); // Only 25n from epoch 5 offense for validator1 @@ -62,7 +62,7 @@ describe('TallySlashingHelpers', () => { const committees = [[mockValidator1]]; const epochsForCommittees = [5n]; - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); expect(votes).toHaveLength(4); // Padded to targetCommitteeSize expect(votes[0]).toEqual(3); // Capped at MAX_SLASH_UNITS_PER_VALIDATOR @@ -91,7 +91,7 @@ describe('TallySlashingHelpers', () => { ]; const epochsForCommittees = [5n, 6n]; // Committees for epochs 5 and 6 - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); expect(votes).toHaveLength(8); // 2 committees × 4 targetCommitteeSize expect(votes[0]).toEqual(2); // validator1 in committee1 @@ -125,7 +125,7 @@ 
describe('TallySlashingHelpers', () => { [mockValidator1, mockValidator3], ]; const epochsForCommittees = [5n, 6n]; - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); expect(votes).toHaveLength(8); // 2 committees × 4 targetCommitteeSize expect(votes[0]).toEqual(2); // validator1 in committee1, epoch 5 offense (20n) @@ -150,7 +150,7 @@ describe('TallySlashingHelpers', () => { const committees: EthAddress[][] = []; const epochsForCommittees: bigint[] = []; - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); expect(votes).toEqual([]); }); @@ -167,7 +167,7 @@ describe('TallySlashingHelpers', () => { const committees = [[mockValidator2, mockValidator3]]; const epochsForCommittees = [5n]; - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); expect(votes).toHaveLength(4); // Padded to targetCommitteeSize expect(votes[0]).toEqual(0); // validator2 has no offenses @@ -197,7 +197,7 @@ describe('TallySlashingHelpers', () => { [mockValidator1, mockValidator3], ]; const epochsForCommittees = [5n, 6n]; - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); expect(votes).toHaveLength(8); // 2 committees × 4 targetCommitteeSize expect(votes[0]).toEqual(3); // validator1 in committee1, always-slash (30n) @@ -228,7 +228,7 @@ describe('TallySlashingHelpers', () => { const committees = [[mockValidator1, mockValidator2, mockValidator3]]; const 
epochsForCommittees = [2n]; // Committee for epoch 2 - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); expect(votes).toHaveLength(4); // Padded to targetCommitteeSize expect(votes[0]).toEqual(BlockNumber(1)); // validator1: 15n offense maps to epoch 2 @@ -255,7 +255,7 @@ describe('TallySlashingHelpers', () => { const committees = [[mockValidator1, mockValidator2]]; const epochsForCommittees = [2n]; - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); expect(votes).toHaveLength(4); // Padded to targetCommitteeSize expect(votes[0]).toEqual(2); // validator1: 10n + 15n = 25n total for epoch 2 @@ -288,7 +288,7 @@ describe('TallySlashingHelpers', () => { const committees = [[mockValidator1, mockValidator2]]; const epochsForCommittees = [3n]; - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); expect(votes).toHaveLength(4); // Padded to targetCommitteeSize expect(votes[0]).toEqual(2); // validator1: 8n + 7n + 5n = 20n total @@ -318,7 +318,7 @@ describe('TallySlashingHelpers', () => { [mockValidator1, mockValidator3], ]; const epochsForCommittees = [5n, 6n]; - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); expect(votes).toHaveLength(8); // 2 committees × 4 targetCommitteeSize expect(votes[0]).toEqual(3); // validator1 committee1: 20n(always) + 15n(epoch5) = 35n @@ -352,7 +352,7 @@ describe('TallySlashingHelpers', () => { 
[mockValidator1, mockValidator2], ]; const epochsForCommittees = [0n, 1n]; - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); expect(votes).toHaveLength(8); // 2 committees × 4 targetCommitteeSize expect(votes[0]).toEqual(BlockNumber(1)); // validator1 epoch0: 15n offense @@ -383,7 +383,7 @@ describe('TallySlashingHelpers', () => { const committees = [[mockValidator1, mockValidator2, mockValidator3]]; const epochsForCommittees = [5n]; - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); expect(votes).toHaveLength(4); // Padded to targetCommitteeSize expect(votes[0]).toEqual(0); // validator1: 0n amount = 0 slash units @@ -409,7 +409,7 @@ describe('TallySlashingHelpers', () => { ]; const epochsForCommittees = [5n, 6n, 7n]; - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); // Should be 12 elements (4 per committee), not 8 expect(votes).toHaveLength(12); @@ -437,7 +437,7 @@ describe('TallySlashingHelpers', () => { ]; const epochsForCommittees = [5n, 6n]; - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); expect(votes).toHaveLength(8); expect(votes.slice(0, 4)).toEqual([0, 0, 0, 0]); // Padded empty committee @@ -460,7 +460,7 @@ describe('TallySlashingHelpers', () => { ]; const epochsForCommittees = [5n, 6n]; - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes 
= getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); expect(votes).toHaveLength(8); expect(votes.slice(0, 4)).toEqual([0, 2, 0, 0]); // validator2 in first committee (20n = 2 units) @@ -477,12 +477,11 @@ describe('TallySlashingHelpers', () => { const committees = [[mockValidator1, mockValidator2, mockValidator3, mockValidator4]]; const epochsForCommittees = [5n]; // Only 2 slashed validators allowed; validator3 should be zeroed out - const { votes, truncatedCount } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, { + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, { ...settings, maxSlashedValidators: 2, }); - expect(truncatedCount).toBe(1); expect(votes).toHaveLength(4); expect(votes[0]).toEqual(3); // validator1: included (1st) expect(votes[1]).toEqual(2); // validator2: included (2nd) @@ -510,12 +509,11 @@ describe('TallySlashingHelpers', () => { const epochsForCommittees = [5n, 6n]; // Limit of 3: validator1@epoch5, validator2@epoch5, validator1@epoch6 are included; // validator3@epoch6 is zeroed out - const { votes, truncatedCount } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, { + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, { ...settings, maxSlashedValidators: 3, }); - expect(truncatedCount).toBe(1); expect(votes).toHaveLength(8); // 2 committees × 4 targetCommitteeSize expect(votes[0]).toEqual(3); // validator1 @ epoch5: included (1st) expect(votes[1]).toEqual(2); // validator2 @ epoch5: included (2nd) @@ -527,6 +525,34 @@ describe('TallySlashingHelpers', () => { expect(votes[7]).toEqual(0); // padded }); + it('truncates based on validator count, not offense count', () => { + // 3 offenses for validator1, 2 for validator2, 1 for validator3 — but only 2 validators allowed. + // Truncation must cut one validator (not one offense record). 
+ const offenses: Offense[] = [ + { validator: mockValidator1, amount: 15n, offenseType: OffenseType.INACTIVITY, epochOrSlot: 5n }, + { validator: mockValidator1, amount: 8n, offenseType: OffenseType.DATA_WITHHOLDING, epochOrSlot: 5n }, + { validator: mockValidator1, amount: 5n, offenseType: OffenseType.VALID_EPOCH_PRUNED, epochOrSlot: 5n }, + { validator: mockValidator2, amount: 20n, offenseType: OffenseType.INACTIVITY, epochOrSlot: 5n }, + { validator: mockValidator2, amount: 5n, offenseType: OffenseType.DATA_WITHHOLDING, epochOrSlot: 5n }, + { validator: mockValidator3, amount: 10n, offenseType: OffenseType.INACTIVITY, epochOrSlot: 5n }, + ]; + + const committees = [[mockValidator1, mockValidator2, mockValidator3, mockValidator4]]; + const epochsForCommittees = [5n]; + // validator1: 15n+8n+5n=28n → 2 units, validator2: 20n+5n=25n → 2 units, validator3: 10n → 1 unit + // Limit of 2 validators: validator3 (lowest vote) is zeroed out + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, { + ...settings, + maxSlashedValidators: 2, + }); + + expect(votes).toHaveLength(4); + expect(votes[0]).toEqual(2); // validator1: 28n → 2 units, included + expect(votes[1]).toEqual(2); // validator2: 25n → 2 units, included + expect(votes[2]).toEqual(0); // validator3: 10n → 1 unit, zeroed out (only 2 validators allowed) + expect(votes[3]).toEqual(0); // validator4: no offenses + }); + it('handles multiple consecutive empty committees', () => { const offenses: Offense[] = [ { @@ -545,7 +571,7 @@ describe('TallySlashingHelpers', () => { ]; const epochsForCommittees = [5n, 6n, 7n, 8n]; - const { votes } = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); + const votes = getSlashConsensusVotesFromOffenses(offenses, committees, epochsForCommittees, settings); expect(votes).toHaveLength(16); // 4 committees × 4 targetCommitteeSize expect(votes.slice(0, 4)).toEqual([0, 0, 0, 0]); // Committee 0: no matching 
offenses diff --git a/yarn-project/stdlib/src/slashing/tally.ts b/yarn-project/stdlib/src/slashing/tally.ts index 63ffe7fb1c28..c22e76fe992c 100644 --- a/yarn-project/stdlib/src/slashing/tally.ts +++ b/yarn-project/stdlib/src/slashing/tally.ts @@ -1,6 +1,7 @@ import { sumBigint } from '@aztec/foundation/bigint'; import { padArrayEnd } from '@aztec/foundation/collection'; import { EthAddress } from '@aztec/foundation/eth-address'; +import { type Logger, createLogger } from '@aztec/foundation/log'; import type { PartialBy } from '@aztec/foundation/types'; import { getEpochForOffense } from './helpers.js'; @@ -13,7 +14,8 @@ import type { Offense, ValidatorSlashVote } from './types.js'; * @param epochsForCommittees - Array of epochs corresponding to each committee * @param settings - Settings including slashingAmounts and optional validator override lists * @param settings.maxSlashedValidators - If set, limits the total number of [validator, epoch] pairs - * with non-zero votes. + * with non-zero votes. The lowest-vote pairs are zeroed out to stay within the limit. + * @param logger - Logger, logs which validators were dropped. * @returns Array of ValidatorSlashVote, where each vote is how many slash units the validator in that position should be slashed */ export function getSlashConsensusVotesFromOffenses( @@ -26,7 +28,8 @@ export function getSlashConsensusVotesFromOffenses( targetCommitteeSize: number; maxSlashedValidators?: number; }, -): { votes: ValidatorSlashVote[]; truncatedCount: number } { + logger: Logger = createLogger('slasher:tally'), +): ValidatorSlashVote[] { const { slashingAmounts, targetCommitteeSize, maxSlashedValidators } = settings; if (committees.length !== epochsForCommittees.length) { @@ -58,7 +61,7 @@ export function getSlashConsensusVotesFromOffenses( // if a cap is set, zero out the lowest-vote [validator, epoch] pairs so that the most severe slashes stay. 
if (maxSlashedValidators === undefined) { - return { votes, truncatedCount: 0 }; + return votes; } const nonZeroByDescendingVote = [...votes.entries()].filter(([, vote]) => vote > 0).sort(([, a], [, b]) => b - a); @@ -68,7 +71,22 @@ export function getSlashConsensusVotesFromOffenses( votes[idx] = 0; } - return { votes, truncatedCount: toTruncate.length }; + if (toTruncate.length > 0) { + const truncated = toTruncate.map(([idx]) => { + const committeeIndex = Math.floor(idx / targetCommitteeSize); + const positionInCommittee = idx % targetCommitteeSize; + return { + validator: committees[committeeIndex][positionInCommittee].toString(), + epoch: epochsForCommittees[committeeIndex], + }; + }); + logger.warn( + `Truncated ${toTruncate.length} validator-epoch pairs to stay within limit of ${maxSlashedValidators}`, + { truncated }, + ); + } + + return votes; } /** Returns the slash vote for the given amount to slash. */ From 215ac2d4036e414ab7e1856e58221b173bd093fb Mon Sep 17 00:00:00 2001 From: Nikita Meshcheriakov Date: Thu, 5 Mar 2026 11:04:34 -0300 Subject: [PATCH 31/37] fix: update dependencies (#20997) Ref: A-459 Most packages are updated via resolutions. Except `minimatch`, it's used by multiple dependencies with different major versions. boxes/yarn.lock still has minimatch 9.0.3 pinned by @typescript-eslint/typescript-estree@6.21.0 (from boxes/boxes/react using @typescript-eslint v6). Fixing this requires upgrading boxes/react to @typescript-eslint v8. 
| yarn.lock | Package | Old Version | New Version | |-----------------------------------|----------------------|-------------|-------------| | yarn-project/yarn.lock | rollup | 4.52.3 | 4.59.0 | | boxes/yarn.lock | rollup | 4.41.1 | 4.59.0 | | playground/yarn.lock | rollup | 4.50.1 | 4.59.0 | | barretenberg/acir_tests/yarn.lock | basic-ftp | 5.0.5 | 5.2.0 | | docs/yarn.lock | h3 | 1.15.4 | 1.15.5 | | barretenberg/docs/yarn.lock | h3 | 1.15.3 | 1.15.5 | | yarn-project/yarn.lock | systeminformation | 5.23.8 | 5.31.1 | | yarn-project/yarn.lock | node-forge | 1.3.1 | 1.3.3 | | boxes/yarn.lock | node-forge | 1.3.1 | 1.3.3 | | docs/yarn.lock | node-forge | 1.3.1 | 1.3.3 | | barretenberg/acir_tests/yarn.lock | node-forge | 1.3.1 | 1.3.3 | | barretenberg/docs/yarn.lock | node-forge | 1.3.1 | 1.3.3 | | yarn-project/yarn.lock | koa | 2.16.2 | 2.16.4 | | yarn-project/yarn.lock | serve | 14.2.4 | 14.2.6 | | boxes/yarn.lock | serve | 14.2.4 | 14.2.6 | | barretenberg/acir_tests/yarn.lock | serve | 14.2.4 | 14.2.6 | | yarn-project/yarn.lock | minimatch | 3.1.2 | 3.1.5 | | boxes/yarn.lock | minimatch | 3.1.2 | 3.1.5 | | docs/yarn.lock | minimatch | 3.1.2 | 3.1.5 | | playground/yarn.lock | minimatch | 3.1.2 | 3.1.5 | | barretenberg/docs/yarn.lock | serve-handler | 6.1.6 | 6.1.7 | | docs/yarn.lock | serve-handler | 6.1.6 | 6.1.7 | | docs/yarn.lock | minimatch | 3.1.2 | 3.1.5 | | yarn-project/yarn.lock | minimatch | 5.1.6 | 5.1.9 | | boxes/yarn.lock | minimatch | 5.1.6 | 5.1.9 | | docs/yarn.lock | minimatch | 5.1.6 | 5.1.9 | | yarn-project/yarn.lock | minimatch | 9.0.5 | 9.0.9 | | docs/yarn.lock | minimatch | 9.0.5 | 9.0.9 | | barretenberg/acir_tests/yarn.lock | minimatch | 9.0.5 | 9.0.9 | | boxes/yarn.lock | minimatch | 9.0.5 | 9.0.9 | | yarn-project/yarn.lock | serialize-javascript | 6.0.2 | 7.0.4 | | boxes/yarn.lock | serialize-javascript | 6.0.2 | 7.0.4 | | docs/yarn.lock | serialize-javascript | 6.0.2 | 7.0.4 | | barretenberg/acir_tests/yarn.lock | serialize-javascript | 6.0.2 | 
7.0.4 | | barretenberg/docs/yarn.lock | serialize-javascript | 6.0.2 | 7.0.4 | | boxes/yarn.lock | axios | 1.12.2 | 1.13.6 | | docs/yarn.lock | axios | 1.12.2 | 1.13.6 | --- .../acir_tests/browser-test-app/package.json | 2 +- barretenberg/acir_tests/package.json | 5 +- barretenberg/acir_tests/yarn.lock | 160 +++---- barretenberg/docs/package.json | 19 +- barretenberg/docs/yarn.lock | 222 +++++++--- boxes/boxes/react/package.json | 2 +- boxes/package.json | 7 +- boxes/yarn.lock | 399 ++++++++++------- docs/package.json | 7 +- docs/yarn.lock | 129 +++--- playground/package.json | 3 + playground/yarn.lock | 242 +++++----- yarn-project/ivc-integration/package.json | 2 +- yarn-project/package.json | 7 +- yarn-project/yarn.lock | 414 ++++++++++-------- 15 files changed, 967 insertions(+), 653 deletions(-) diff --git a/barretenberg/acir_tests/browser-test-app/package.json b/barretenberg/acir_tests/browser-test-app/package.json index 857d7be57509..a118807386d7 100644 --- a/barretenberg/acir_tests/browser-test-app/package.json +++ b/barretenberg/acir_tests/browser-test-app/package.json @@ -15,7 +15,7 @@ "pako": "^2.1.0", "pino": "^9.5.0", "process": "^0.11.10", - "serve": "^14.2.1", + "serve": "^14.2.6", "ts-loader": "^9.5.1", "typescript": "^5.4.2", "webpack": "^5.99.6", diff --git a/barretenberg/acir_tests/package.json b/barretenberg/acir_tests/package.json index 5e443a93352f..82bcae9dd162 100644 --- a/barretenberg/acir_tests/package.json +++ b/barretenberg/acir_tests/package.json @@ -12,6 +12,9 @@ "pino": "^9.5.0" }, "resolutions": { - "ws": "^8.17.1" + "ws": "^8.17.1", + "basic-ftp": "^5.2.0", + "node-forge": "^1.3.2", + "serialize-javascript": "^7.0.3" } } diff --git a/barretenberg/acir_tests/yarn.lock b/barretenberg/acir_tests/yarn.lock index 3e4b08f0ac21..23cba15e717f 100644 --- a/barretenberg/acir_tests/yarn.lock +++ b/barretenberg/acir_tests/yarn.lock @@ -902,7 +902,7 @@ __metadata: languageName: node linkType: hard -"accepts@npm:~1.3.4, accepts@npm:~1.3.5, 
accepts@npm:~1.3.8": +"accepts@npm:~1.3.4, accepts@npm:~1.3.8": version: 1.3.8 resolution: "accepts@npm:1.3.8" dependencies: @@ -969,15 +969,15 @@ __metadata: languageName: node linkType: hard -"ajv@npm:8.12.0": - version: 8.12.0 - resolution: "ajv@npm:8.12.0" +"ajv@npm:8.18.0": + version: 8.18.0 + resolution: "ajv@npm:8.18.0" dependencies: - fast-deep-equal: "npm:^3.1.1" + fast-deep-equal: "npm:^3.1.3" + fast-uri: "npm:^3.0.1" json-schema-traverse: "npm:^1.0.0" require-from-string: "npm:^2.0.2" - uri-js: "npm:^4.2.2" - checksum: 10c0/ac4f72adf727ee425e049bc9d8b31d4a57e1c90da8d28bcd23d60781b12fcd6fc3d68db5df16994c57b78b94eed7988f5a6b482fd376dc5b084125e20a0a622e + checksum: 10c0/e7517c426173513a07391be951879932bdf3348feaebd2199f5b901c20f99d60db8cd1591502d4d551dc82f594e82a05c4fe1c70139b15b8937f7afeaed9532f languageName: node linkType: hard @@ -1167,10 +1167,10 @@ __metadata: languageName: node linkType: hard -"basic-ftp@npm:^5.0.2": - version: 5.0.5 - resolution: "basic-ftp@npm:5.0.5" - checksum: 10c0/be983a3997749856da87b839ffce6b8ed6c7dbf91ea991d5c980d8add275f9f2926c19f80217ac3e7f353815be879371d636407ca72b038cea8cab30e53928a6 +"basic-ftp@npm:^5.2.0": + version: 5.2.0 + resolution: "basic-ftp@npm:5.2.0" + checksum: 10c0/a0f85c01deae0723021f9bf4a7be29378186fa8bba41e74ea11832fe74c187ce90c3599c3cc5ec936581cfd150020e79f4a9ed0ee9fb20b2308e69b045f3a059 languageName: node linkType: hard @@ -1261,12 +1261,12 @@ __metadata: languageName: node linkType: hard -"brace-expansion@npm:^2.0.1": - version: 2.0.1 - resolution: "brace-expansion@npm:2.0.1" +"brace-expansion@npm:^2.0.2": + version: 2.0.2 + resolution: "brace-expansion@npm:2.0.2" dependencies: balanced-match: "npm:^1.0.0" - checksum: 10c0/b358f2fe060e2d7a87aa015979ecea07f3c37d4018f8d6deb5bd4c229ad3a0384fe6029bb76cd8be63c81e516ee52d1a0673edbe2023d53a5191732ae3c3e49f + checksum: 10c0/6d117a4c793488af86b83172deb6af143e94c17bc53b0b3cec259733923b4ca84679d506ac261f4ba3c7ed37c46018e2ff442f9ce453af8643ecd64f4a54e6cf 
languageName: node linkType: hard @@ -1289,7 +1289,7 @@ __metadata: pako: "npm:^2.1.0" pino: "npm:^9.5.0" process: "npm:^0.11.10" - serve: "npm:^14.2.1" + serve: "npm:^14.2.6" ts-loader: "npm:^9.5.1" typescript: "npm:^5.4.2" webpack: "npm:^5.99.6" @@ -1605,7 +1605,7 @@ __metadata: languageName: node linkType: hard -"compressible@npm:~2.0.16, compressible@npm:~2.0.18": +"compressible@npm:~2.0.18": version: 2.0.18 resolution: "compressible@npm:2.0.18" dependencies: @@ -1614,18 +1614,18 @@ __metadata: languageName: node linkType: hard -"compression@npm:1.7.4": - version: 1.7.4 - resolution: "compression@npm:1.7.4" +"compression@npm:1.8.1": + version: 1.8.1 + resolution: "compression@npm:1.8.1" dependencies: - accepts: "npm:~1.3.5" - bytes: "npm:3.0.0" - compressible: "npm:~2.0.16" + bytes: "npm:3.1.2" + compressible: "npm:~2.0.18" debug: "npm:2.6.9" - on-headers: "npm:~1.0.2" - safe-buffer: "npm:5.1.2" + negotiator: "npm:~0.6.4" + on-headers: "npm:~1.1.0" + safe-buffer: "npm:5.2.1" vary: "npm:~1.1.2" - checksum: 10c0/138db836202a406d8a14156a5564fb1700632a76b6e7d1546939472895a5304f2b23c80d7a22bf44c767e87a26e070dbc342ea63bb45ee9c863354fa5556bbbc + checksum: 10c0/85114b0b91c16594dc8c671cd9b05ef5e465066a60e5a4ed8b4551661303559a896ed17bb72c4234c04064e078f6ca86a34b8690349499a43f6fc4b844475da4 languageName: node linkType: hard @@ -2297,7 +2297,7 @@ __metadata: languageName: node linkType: hard -"fast-deep-equal@npm:^3.1.1, fast-deep-equal@npm:^3.1.3": +"fast-deep-equal@npm:^3.1.3": version: 3.1.3 resolution: "fast-deep-equal@npm:3.1.3" checksum: 10c0/40dedc862eb8992c54579c66d914635afbec43350afbbe991235fdcb4e3a8d5af1b23ae7e79bef7d4882d0ecee06c3197488026998fb19f72dc95acff1d1b1d0 @@ -3377,21 +3377,21 @@ __metadata: languageName: node linkType: hard -"minimatch@npm:3.1.2": - version: 3.1.2 - resolution: "minimatch@npm:3.1.2" +"minimatch@npm:3.1.5": + version: 3.1.5 + resolution: "minimatch@npm:3.1.5" dependencies: brace-expansion: "npm:^1.1.7" - checksum: 
10c0/0262810a8fc2e72cca45d6fd86bd349eee435eb95ac6aa45c9ea2180e7ee875ef44c32b55b5973ceabe95ea12682f6e3725cbb63d7a2d1da3ae1163c8b210311 + checksum: 10c0/2ecbdc0d33f07bddb0315a8b5afbcb761307a8778b48f0b312418ccbced99f104a2d17d8aca7573433c70e8ccd1c56823a441897a45e384ea76ef401a26ace70 languageName: node linkType: hard "minimatch@npm:^9.0.4": - version: 9.0.5 - resolution: "minimatch@npm:9.0.5" + version: 9.0.9 + resolution: "minimatch@npm:9.0.9" dependencies: - brace-expansion: "npm:^2.0.1" - checksum: 10c0/de96cf5e35bdf0eab3e2c853522f98ffbe9a36c37797778d2665231ec1f20a9447a7e567cb640901f89e4daaa95ae5d70c65a9e8aa2bb0019b6facbc3c0575ed + brace-expansion: "npm:^2.0.2" + checksum: 10c0/0b6a58530dbb00361745aa6c8cffaba4c90f551afe7c734830bd95fd88ebf469dd7355a027824ea1d09e37181cfeb0a797fb17df60c15ac174303ac110eb7e86 languageName: node linkType: hard @@ -3609,10 +3609,10 @@ __metadata: languageName: node linkType: hard -"node-forge@npm:^1": - version: 1.3.1 - resolution: "node-forge@npm:1.3.1" - checksum: 10c0/e882819b251a4321f9fc1d67c85d1501d3004b4ee889af822fd07f64de3d1a8e272ff00b689570af0465d65d6bf5074df9c76e900e0aff23e60b847f2a46fbe8 +"node-forge@npm:^1.3.2": + version: 1.3.3 + resolution: "node-forge@npm:1.3.3" + checksum: 10c0/9c6f53b0ebb34865872cf62a35b0aef8fb337e2efc766626c2e3a0040f4c02933bf29a62ba999eb44a2aca73bd512c4eda22705a47b94654b9fb8ed53db9a1db languageName: node linkType: hard @@ -3729,6 +3729,13 @@ __metadata: languageName: node linkType: hard +"on-headers@npm:~1.1.0": + version: 1.1.0 + resolution: "on-headers@npm:1.1.0" + checksum: 10c0/2c3b6b0d68ec9adbd561dc2d61c9b14da8ac03d8a2f0fd9e97bdf0600c887d5d97f664ff3be6876cf40cda6e3c587d73a4745e10b426ac50c7664fc5a0dfc0a1 + languageName: node + linkType: hard + "once@npm:^1.3.1, once@npm:^1.4.0": version: 1.4.0 resolution: "once@npm:1.4.0" @@ -4138,13 +4145,6 @@ __metadata: languageName: node linkType: hard -"punycode@npm:^2.1.0": - version: 2.3.1 - resolution: "punycode@npm:2.3.1" - checksum: 
10c0/14f76a8206bc3464f794fb2e3d3cc665ae416c01893ad7a02b23766eb07159144ee612ad67af5e84fa4479ccfe67678c4feb126b0485651b302babf66f04f9e9 - languageName: node - linkType: hard - "puppeteer-core@npm:24.22.3": version: 24.22.3 resolution: "puppeteer-core@npm:24.22.3" @@ -4192,15 +4192,6 @@ __metadata: languageName: node linkType: hard -"randombytes@npm:^2.1.0": - version: 2.1.0 - resolution: "randombytes@npm:2.1.0" - dependencies: - safe-buffer: "npm:^5.1.0" - checksum: 10c0/50395efda7a8c94f5dffab564f9ff89736064d32addf0cc7e8bf5e4166f09f8ded7a0849ca6c2d2a59478f7d90f78f20d8048bca3cdf8be09d8e8a10790388f3 - languageName: node - linkType: hard - "range-parser@npm:1.2.0": version: 1.2.0 resolution: "range-parser@npm:1.2.0" @@ -4433,20 +4424,20 @@ __metadata: languageName: node linkType: hard -"safe-buffer@npm:5.1.2, safe-buffer@npm:~5.1.0, safe-buffer@npm:~5.1.1": - version: 5.1.2 - resolution: "safe-buffer@npm:5.1.2" - checksum: 10c0/780ba6b5d99cc9a40f7b951d47152297d0e260f0df01472a1b99d4889679a4b94a13d644f7dbc4f022572f09ae9005fa2fbb93bbbd83643316f365a3e9a45b21 - languageName: node - linkType: hard - -"safe-buffer@npm:5.2.1, safe-buffer@npm:>=5.1.0, safe-buffer@npm:^5.0.1, safe-buffer@npm:^5.1.0, safe-buffer@npm:~5.2.0": +"safe-buffer@npm:5.2.1, safe-buffer@npm:>=5.1.0, safe-buffer@npm:^5.0.1, safe-buffer@npm:~5.2.0": version: 5.2.1 resolution: "safe-buffer@npm:5.2.1" checksum: 10c0/6501914237c0a86e9675d4e51d89ca3c21ffd6a31642efeba25ad65720bce6921c9e7e974e5be91a786b25aa058b5303285d3c15dbabf983a919f5f630d349f3 languageName: node linkType: hard +"safe-buffer@npm:~5.1.0, safe-buffer@npm:~5.1.1": + version: 5.1.2 + resolution: "safe-buffer@npm:5.1.2" + checksum: 10c0/780ba6b5d99cc9a40f7b951d47152297d0e260f0df01472a1b99d4889679a4b94a13d644f7dbc4f022572f09ae9005fa2fbb93bbbd83643316f365a3e9a45b21 + languageName: node + linkType: hard + "safe-stable-stringify@npm:^2.3.1": version: 2.5.0 resolution: "safe-stable-stringify@npm:2.5.0" @@ -4529,27 +4520,25 @@ __metadata: languageName: 
node linkType: hard -"serialize-javascript@npm:^6.0.2": - version: 6.0.2 - resolution: "serialize-javascript@npm:6.0.2" - dependencies: - randombytes: "npm:^2.1.0" - checksum: 10c0/2dd09ef4b65a1289ba24a788b1423a035581bef60817bea1f01eda8e3bda623f86357665fe7ac1b50f6d4f583f97db9615b3f07b2a2e8cbcb75033965f771dd2 +"serialize-javascript@npm:^7.0.3": + version: 7.0.4 + resolution: "serialize-javascript@npm:7.0.4" + checksum: 10c0/f3da6f994c41306fbfabb55eefe280a46da05592939a84b0d95c84e296c92ba9e6a3d86cf7bbd71e7a59e1cfcd8481745910af109bedbd3ed853b444d32f9ee9 languageName: node linkType: hard -"serve-handler@npm:6.1.6": - version: 6.1.6 - resolution: "serve-handler@npm:6.1.6" +"serve-handler@npm:6.1.7": + version: 6.1.7 + resolution: "serve-handler@npm:6.1.7" dependencies: bytes: "npm:3.0.0" content-disposition: "npm:0.5.2" mime-types: "npm:2.1.18" - minimatch: "npm:3.1.2" + minimatch: "npm:3.1.5" path-is-inside: "npm:1.0.2" path-to-regexp: "npm:3.3.0" range-parser: "npm:1.2.0" - checksum: 10c0/1e1cb6bbc51ee32bc1505f2e0605bdc2e96605c522277c977b67f83be9d66bd1eec8604388714a4d728e036d86b629bc9aec02120ea030d3d2c3899d44696503 + checksum: 10c0/35afb68d81afd3c38d15792a5bc2451915b739bef2898a47ebd190db6a4e29846530ac00292b8008fe7297a819257c3948be2deaf4ffd32c96689e8947cf0ae9 languageName: node linkType: hard @@ -4580,24 +4569,24 @@ __metadata: languageName: node linkType: hard -"serve@npm:^14.2.1": - version: 14.2.4 - resolution: "serve@npm:14.2.4" +"serve@npm:^14.2.6": + version: 14.2.6 + resolution: "serve@npm:14.2.6" dependencies: "@zeit/schemas": "npm:2.36.0" - ajv: "npm:8.12.0" + ajv: "npm:8.18.0" arg: "npm:5.0.2" boxen: "npm:7.0.0" chalk: "npm:5.0.1" chalk-template: "npm:0.4.0" clipboardy: "npm:3.0.0" - compression: "npm:1.7.4" + compression: "npm:1.8.1" is-port-reachable: "npm:4.0.0" - serve-handler: "npm:6.1.6" + serve-handler: "npm:6.1.7" update-check: "npm:1.5.4" bin: serve: build/main.js - checksum: 
10c0/93abecd6214228d529065040f7c0cbe541c1cc321c6a94b8a968f45a519bd9c46a9fd5e45a9b24a1f5736c5b547b8fa60d5414ebc78f870e29431b64165c1d06 + checksum: 10c0/7e1668e0d187719dbe4f3de967012ce2263c967f6135d9c630f803b0f173334e1442ab326fcc4c8e6cd4e293d8bd8c773aebab2746ecaa0fb1ab29a36079763b languageName: node linkType: hard @@ -5310,15 +5299,6 @@ __metadata: languageName: node linkType: hard -"uri-js@npm:^4.2.2": - version: 4.4.1 - resolution: "uri-js@npm:4.4.1" - dependencies: - punycode: "npm:^2.1.0" - checksum: 10c0/4ef57b45aa820d7ac6496e9208559986c665e49447cb072744c13b66925a362d96dd5a46c4530a6b8e203e5db5fe849369444440cb22ecfc26c679359e5dfa3c - languageName: node - linkType: hard - "util-deprecate@npm:^1.0.1, util-deprecate@npm:~1.0.1": version: 1.0.2 resolution: "util-deprecate@npm:1.0.2" diff --git a/barretenberg/docs/package.json b/barretenberg/docs/package.json index a11671aee17c..04147dbbb2bb 100644 --- a/barretenberg/docs/package.json +++ b/barretenberg/docs/package.json @@ -34,6 +34,7 @@ "react-dom": "^19.0.0", "rehype-katex": "^7.0.1", "remark-math": "^6.0.0", + "serve": "^14.2.6", "tsx": "^4.19.3" }, "devDependencies": { @@ -44,20 +45,24 @@ "@swc/core": "^1.10.0", "@swc/jest": "^0.2.37", "@types/jest": "^30.0.0", + "@typescript/native-preview": "7.0.0-dev.20251126.1", "jest": "^30.0.0", "netlify-cli": "^17.23.0", "nodemon": "^3.1.10", "ts-node": "^10.9.2", "tsconfig-paths": "^4.2.0", - "typescript": "~5.6.2", - "@typescript/native-preview": "7.0.0-dev.20251126.1" + "typescript": "~5.6.2" }, "jest": { "testRegex": "./examples/.*\\.test\\.ts$", "transform": { - "^.+\\.tsx?$": ["@swc/jest"] + "^.+\\.tsx?$": [ + "@swc/jest" + ] }, - "extensionsToTreatAsEsm": [".ts"], + "extensionsToTreatAsEsm": [ + ".ts" + ], "testTimeout": 120000 }, "browserslist": { @@ -77,7 +82,11 @@ }, "resolutions": { "tar-fs": "^3.1.1", - "@docusaurus/mdx-loader/image-size": "1.2.1" + "@docusaurus/mdx-loader/image-size": "1.2.1", + "h3": "^1.15.5", + "node-forge": "^1.3.2", + "serve-handler": 
"^6.1.7", + "serialize-javascript": "^7.0.3" }, "packageManager": "yarn@1.22.22+sha512.a6b2f7906b721bba3d67d4aff083df04dad64c399707841b7acf00f6b133b7ac24255f2652fa22ae3534329dc6180534e98d17432037ff6fd140556e2bb3137e" } diff --git a/barretenberg/docs/yarn.lock b/barretenberg/docs/yarn.lock index d4306cb115eb..4dd0ec62576a 100644 --- a/barretenberg/docs/yarn.lock +++ b/barretenberg/docs/yarn.lock @@ -6371,6 +6371,11 @@ resolved "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== +"@zeit/schemas@2.36.0": + version "2.36.0" + resolved "https://registry.yarnpkg.com/@zeit/schemas/-/schemas-2.36.0.tgz#7a1b53f4091e18d0b404873ea3e3c83589c765f2" + integrity sha512-7kjMwcChYEzMKjeex9ZFXkt1AyNov9R5HZtjBKVsmVpw7pa7ZtlCGvCBC2vnnXctaYN+aRI61HjIqeetZW5ROg== + abbrev@1: version "1.1.1" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" @@ -6492,6 +6497,16 @@ ajv-keywords@^5.1.0: dependencies: fast-deep-equal "^3.1.3" +ajv@8.18.0: + version "8.18.0" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.18.0.tgz#8864186b6738d003eb3a933172bb3833e10cefbc" + integrity sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A== + dependencies: + fast-deep-equal "^3.1.3" + fast-uri "^3.0.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: version "6.12.6" resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" @@ -6669,6 +6684,11 @@ anymatch@^3.1.3, anymatch@~3.1.2: resolved "https://registry.yarnpkg.com/aproba/-/aproba-2.1.0.tgz#75500a190313d95c64e871e7e4284c6ac219f0b1" integrity sha512-tLIEcj5GuR2RSTnxNKdkK0dJ/GrC7P38sUkiDmDuHfsHmbagTFAxDVIBltoklXEVIQ/f14IL8IMJ5pn9Hez1Ew== +arch@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/arch/-/arch-2.2.0.tgz#1bc47818f305764f23ab3306b0bfc086c5a29d11" + integrity 
sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ== + archiver-utils@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/archiver-utils/-/archiver-utils-2.1.0.tgz#e8a460e94b693c3e3da182a098ca6285ba9249e2" @@ -6748,16 +6768,16 @@ are-we-there-yet@^2.0.0: delegates "^1.0.0" readable-stream "^3.6.0" +arg@5.0.2, arg@^5.0.0: + version "5.0.2" + resolved "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz" + integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== + arg@^4.1.0: version "4.1.3" resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== -arg@^5.0.0: - version "5.0.2" - resolved "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz" - integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== - argparse@^1.0.7: version "1.0.10" resolved "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz" @@ -7136,6 +7156,20 @@ boolbase@^1.0.0: resolved "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz" integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== +boxen@7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/boxen/-/boxen-7.0.0.tgz#9e5f8c26e716793fc96edcf7cf754cdf5e3fbf32" + integrity sha512-j//dBVuyacJbvW+tvZ9HuH03fZ46QcaKvvhZickZqtB271DxJ7SNRSNxrV/dZX0085m7hISRZWbzWlJvx/rHSg== + dependencies: + ansi-align "^3.0.1" + camelcase "^7.0.0" + chalk "^5.0.1" + cli-boxes "^3.0.0" + string-width "^5.1.2" + type-fest "^2.13.0" + widest-line "^4.0.1" + wrap-ansi "^8.0.1" + boxen@7.1.1, boxen@^7.0.0: version "7.1.1" resolved "https://registry.npmjs.org/boxen/-/boxen-7.1.1.tgz" @@ -7186,7 +7220,7 @@ brace-expansion@^1.1.7: balanced-match "^1.0.0" concat-map "0.0.1" -brace-expansion@^2.0.1: +brace-expansion@^2.0.1, 
brace-expansion@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.2.tgz#54fc53237a613d854c7bd37463aad17df87214e7" integrity sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ== @@ -7364,7 +7398,7 @@ camelcase@^6.2.0, camelcase@^6.3.0: resolved "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz" integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== -camelcase@^7.0.1: +camelcase@^7.0.0, camelcase@^7.0.1: version "7.0.1" resolved "https://registry.npmjs.org/camelcase/-/camelcase-7.0.1.tgz" integrity sha512-xlx1yCK2Oc1APsPXDL2LdlNP6+uu8OCDdhOBSVT279M/S+y75O30C2VuD8T2ogdePBBl7PfPF4504tnLgX3zfw== @@ -7394,6 +7428,18 @@ ccount@^2.0.0: resolved "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz" integrity sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg== +chalk-template@0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/chalk-template/-/chalk-template-0.4.0.tgz#692c034d0ed62436b9062c1707fadcd0f753204b" + integrity sha512-/ghrgmhfY8RaSdeo43hNXxpoHAtxdbskUHjPpfqUWGttFgycUhYPGx3YZBCnUCvOa7Doivn1IZec3DEGFoMgLg== + dependencies: + chalk "^4.1.2" + +chalk@5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.0.1.tgz#ca57d71e82bb534a296df63bbacc4a1c22b2a4b6" + integrity sha512-Fo07WOYGqMfCWHOzSXOt2CxDbC6skS/jO9ynEcmpANMoPrD+W1r1K6Vx7iNm+AQmETU1Xr2t+n8nzkV9t6xh3w== + chalk@5.3.0: version "5.3.0" resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.3.0.tgz#67c20a7ebef70e7f3970a01f90fa210cb6860385" @@ -7645,6 +7691,15 @@ cli-width@^3.0.0: resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6" integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw== +clipboardy@3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/clipboardy/-/clipboardy-3.0.0.tgz#f3876247404d334c9ed01b6f269c11d09a5e3092" + integrity sha512-Su+uU5sr1jkUy1sGRpLKjKrvEOVXgSgiSInwa/qeID6aJ07yh+5NWc3h2QfjHjBnfX4LhtFcuAWKUsJ3r+fjbg== + dependencies: + arch "^2.2.0" + execa "^5.1.1" + is-wsl "^2.2.0" + clipboardy@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/clipboardy/-/clipboardy-4.0.0.tgz#e73ced93a76d19dd379ebf1f297565426dffdca1" @@ -7892,6 +7947,19 @@ compressible@~2.0.18: dependencies: mime-db ">= 1.43.0 < 2" +compression@1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/compression/-/compression-1.8.1.tgz#4a45d909ac16509195a9a28bd91094889c180d79" + integrity sha512-9mAqGPHLakhCLeNyxPkK4xVo746zQ/czLH1Ky+vkitMnWfWZps8r0qXuwhwizagCRttsL4lfG4pIOvaWLpAP0w== + dependencies: + bytes "3.1.2" + compressible "~2.0.18" + debug "2.6.9" + negotiator "~0.6.4" + on-headers "~1.1.0" + safe-buffer "5.2.1" + vary "~1.1.2" + compression@^1.7.4: version "1.8.0" resolved "https://registry.npmjs.org/compression/-/compression-1.8.0.tgz" @@ -8180,7 +8248,7 @@ cross-spawn@^7.0.2, cross-spawn@^7.0.3, cross-spawn@^7.0.6: shebang-command "^2.0.0" which "^2.0.1" -"crossws@>=0.2.0 <0.4.0", crossws@^0.3.4: +"crossws@>=0.2.0 <0.4.0", crossws@^0.3.5: version "0.3.5" resolved "https://registry.yarnpkg.com/crossws/-/crossws-0.3.5.tgz#daad331d44148ea6500098bc858869f3a5ab81a6" integrity sha512-ojKiDvcmByhwa8YYqbQI/hg7MEU0NC03+pSdEq4ZUnZR9xXpwk7E43SMNGkn+JxJGPFtNvQ48+vV2p+P1ml5PA== @@ -10713,19 +10781,19 @@ gzip-size@^6.0.0: dependencies: duplexer "^0.1.2" -h3@^1.10.0, h3@^1.12.0, h3@^1.15.2: - version "1.15.3" - resolved "https://registry.yarnpkg.com/h3/-/h3-1.15.3.tgz#e242ec6a7692a45caed3e4a73710cede4fb8d863" - integrity sha512-z6GknHqyX0h9aQaTx22VZDf6QyZn+0Nh+Ym8O/u0SGSkyF5cuTJYKlc8MkzW3Nzf9LE1ivcpmYC3FUGpywhuUQ== +h3@^1.10.0, h3@^1.12.0, h3@^1.15.2, h3@^1.15.5: + version "1.15.5" + resolved "https://registry.yarnpkg.com/h3/-/h3-1.15.5.tgz#e2f28d4a66a249973bb050eaddb06b9ab55506f8" 
+ integrity sha512-xEyq3rSl+dhGX2Lm0+eFQIAzlDN6Fs0EcC4f7BNUmzaRX/PTzeuM+Tr2lHB8FoXggsQIeXLj8EDVgs5ywxyxmg== dependencies: cookie-es "^1.2.2" - crossws "^0.3.4" + crossws "^0.3.5" defu "^6.1.4" destr "^2.0.5" iron-webcrypto "^1.2.1" - node-mock-http "^1.0.0" + node-mock-http "^1.0.4" radix3 "^1.1.2" - ufo "^1.6.1" + ufo "^1.6.3" uncrypto "^0.1.3" handle-thing@^2.0.0: @@ -11717,6 +11785,11 @@ is-plain-object@^2.0.4: dependencies: isobject "^3.0.1" +is-port-reachable@4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/is-port-reachable/-/is-port-reachable-4.0.0.tgz#dac044091ef15319c8ab2f34604d8794181f8c2d" + integrity sha512-9UoipoxYmSk6Xy7QFgRv2HDyaysmgSG75TFQs6S+3pDM7ZhKTF/bskZV+0UlABHzKjNVhPjYCLfeZUEg1wXxig== + is-regexp@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz" @@ -13765,26 +13838,26 @@ minimalistic-assert@^1.0.0: resolved "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== -minimatch@3.1.2, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: - version "3.1.2" - resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz" - integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== +minimatch@3.1.5, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: + version "3.1.5" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.5.tgz#580c88f8d5445f2bd6aa8f3cadefa0de79fbd69e" + integrity sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w== dependencies: brace-expansion "^1.1.7" minimatch@^5.0.1, minimatch@^5.1.0: - version "5.1.6" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.6.tgz#1cfcb8cf5522ea69952cd2af95ae09477f122a96" - integrity 
sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g== + version "5.1.9" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.9.tgz#1293ef15db0098b394540e8f9f744f9fda8dee4b" + integrity sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw== dependencies: brace-expansion "^2.0.1" minimatch@^9.0.0, minimatch@^9.0.4: - version "9.0.5" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.5.tgz#d74f9dd6b57d83d8e98cfb82133b03978bc929e5" - integrity sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow== + version "9.0.9" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.9.tgz#9b0cb9fcb78087f6fd7eababe2511c4d3d60574e" + integrity sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg== dependencies: - brace-expansion "^2.0.1" + brace-expansion "^2.0.2" minimist@^1.2.0, minimist@^1.2.3, minimist@^1.2.5, minimist@^1.2.6: version "1.2.8" @@ -14316,10 +14389,10 @@ node-fetch@^2.6.7: dependencies: whatwg-url "^5.0.0" -node-forge@^1, node-forge@^1.3.1: - version "1.3.1" - resolved "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz" - integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== +node-forge@^1, node-forge@^1.3.1, node-forge@^1.3.2: + version "1.3.3" + resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.3.tgz#0ad80f6333b3a0045e827ac20b7f735f93716751" + integrity sha512-rLvcdSyRCyouf6jcOIPe/BgwG/d7hKjzMKOas33/pHEr6gbq18IK9zV7DiPvzsz0oBJPme6qr6H6kGZuI9/DZg== node-gyp-build-optional-packages@5.2.2: version "5.2.2" @@ -14338,10 +14411,10 @@ node-int64@^0.4.0: resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== 
-node-mock-http@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/node-mock-http/-/node-mock-http-1.0.1.tgz#29b4e0b08d786acadda450e8c159d3e652b3cbfd" - integrity sha512-0gJJgENizp4ghds/Ywu2FCmcRsgBTmRQzYPZm61wy+Em2sBarSka0OhQS5huLBg6od1zkNpnWMCZloQDFVvOMQ== +node-mock-http@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/node-mock-http/-/node-mock-http-1.0.4.tgz#21f2ab4ce2fe4fbe8a660d7c5195a1db85e042a4" + integrity sha512-8DY+kFsDkNXy1sJglUfuODx1/opAGJGyrTuFqEoN90oRc2Vk0ZbD4K2qmKXBBEhZQzdKHIVfEJpDU8Ak2NJEvQ== node-releases@^2.0.19: version "2.0.19" @@ -14574,6 +14647,11 @@ on-headers@^1.0.0, on-headers@~1.0.2: resolved "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz" integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== +on-headers@~1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.1.0.tgz#59da4f91c45f5f989c6e4bcedc5a3b0aed70ff65" + integrity sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A== + once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" resolved "https://registry.npmjs.org/once/-/once-1.4.0.tgz" @@ -16095,13 +16173,6 @@ random-bytes@~1.0.0: resolved "https://registry.yarnpkg.com/random-bytes/-/random-bytes-1.0.0.tgz#4f68a1dc0ae58bd3fb95848c30324db75d64360b" integrity sha512-iv7LhNVO047HzYR3InF6pUcUsPQiHTM1Qal51DcGSuZFBil1aBBWG5eHPNek7bvILMaYJ/8RU1e8w1AMdHmLQQ== -randombytes@^2.1.0: - version "2.1.0" - resolved "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz" - integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== - dependencies: - safe-buffer "^5.1.0" - range-parser@1.2.0: version "1.2.0" resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz" @@ -16132,7 +16203,7 @@ raw-body@3.0.0: iconv-lite "0.6.3" unpipe "1.0.0" -rc@1.2.8, rc@^1.2.7: +rc@1.2.8, rc@^1.0.1, rc@^1.1.6, rc@^1.2.7: 
version "1.2.8" resolved "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz" integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== @@ -16482,6 +16553,14 @@ regexpu-core@^6.2.0: unicode-match-property-ecmascript "^2.0.0" unicode-match-property-value-ecmascript "^2.1.0" +registry-auth-token@3.3.2: + version "3.3.2" + resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-3.3.2.tgz#851fd49038eecb586911115af845260eec983f20" + integrity sha512-JL39c60XlzCVgNrO+qq68FoNb56w/m7JYvGR2jT5iR1xBrUA3Mfx5Twk5rqTThPmQKMWydGmq8oFtDlxfrmxnQ== + dependencies: + rc "^1.1.6" + safe-buffer "^5.0.1" + registry-auth-token@^5.0.1, registry-auth-token@^5.0.2: version "5.1.0" resolved "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-5.1.0.tgz" @@ -16489,6 +16568,13 @@ registry-auth-token@^5.0.1, registry-auth-token@^5.0.2: dependencies: "@pnpm/npm-conf" "^2.1.0" +registry-url@3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-3.1.0.tgz#3d4ef870f73dde1d77f0cf9a381432444e174942" + integrity sha512-ZbgR5aZEdf4UKZVBPYIgaglBmSF2Hi94s2PcIHhRGFjKYu+chjJdYfHn4rt3hB6eCKLJ8giVIIfgMa1ehDfZKA== + dependencies: + rc "^1.0.1" + registry-url@^6.0.0, registry-url@^6.0.1: version "6.0.1" resolved "https://registry.npmjs.org/registry-url/-/registry-url-6.0.1.tgz" @@ -16837,7 +16923,7 @@ rxjs@^7.5.5: dependencies: tslib "^2.1.0" -safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@~5.2.0: +safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== @@ -16986,22 +17072,20 @@ send@0.19.0: range-parser "~1.2.1" statuses "2.0.1" -serialize-javascript@^6.0.0, serialize-javascript@^6.0.1, serialize-javascript@^6.0.2: - version 
"6.0.2" - resolved "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz" - integrity sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g== - dependencies: - randombytes "^2.1.0" +serialize-javascript@^6.0.0, serialize-javascript@^6.0.1, serialize-javascript@^6.0.2, serialize-javascript@^7.0.3: + version "7.0.4" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-7.0.4.tgz#c517735bd5b7631dd1fc191ee19cbb713ff8e05c" + integrity sha512-DuGdB+Po43Q5Jxwpzt1lhyFSYKryqoNjQSA9M92tyw0lyHIOur+XCalOUe0KTJpyqzT8+fQ5A0Jf7vCx/NKmIg== -serve-handler@^6.1.6: - version "6.1.6" - resolved "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.6.tgz" - integrity sha512-x5RL9Y2p5+Sh3D38Fh9i/iQ5ZK+e4xuXRd/pGbM4D13tgo/MGwbttUk8emytcr1YYzBYs+apnUngBDFYfpjPuQ== +serve-handler@6.1.7, serve-handler@^6.1.6, serve-handler@^6.1.7: + version "6.1.7" + resolved "https://registry.yarnpkg.com/serve-handler/-/serve-handler-6.1.7.tgz#e9bb864e87ee71e8dab874cde44d146b77e3fb78" + integrity sha512-CinAq1xWb0vR3twAv9evEU8cNWkXCb9kd5ePAHUKJBkOsUpR1wt/CvGdeca7vqumL1U5cSaeVQ6zZMxiJ3yWsg== dependencies: bytes "3.0.0" content-disposition "0.5.2" mime-types "2.1.18" - minimatch "3.1.2" + minimatch "3.1.5" path-is-inside "1.0.2" path-to-regexp "3.3.0" range-parser "1.2.0" @@ -17029,6 +17113,23 @@ serve-static@1.16.2: parseurl "~1.3.3" send "0.19.0" +serve@^14.2.6: + version "14.2.6" + resolved "https://registry.yarnpkg.com/serve/-/serve-14.2.6.tgz#b5e520dfda9b1ed3b824a8e8d4fd6f69e4c6944c" + integrity sha512-QEjUSA+sD4Rotm1znR8s50YqA3kYpRGPmtd5GlFxbaL9n/FdUNbqMhxClqdditSk0LlZyA/dhud6XNRTOC9x2Q== + dependencies: + "@zeit/schemas" "2.36.0" + ajv "8.18.0" + arg "5.0.2" + boxen "7.0.0" + chalk "5.0.1" + chalk-template "0.4.0" + clipboardy "3.0.0" + compression "1.8.1" + is-port-reachable "4.0.0" + serve-handler "6.1.7" + update-check "1.5.4" + set-blocking@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" @@ -18208,6 +18309,11 @@ ufo@^1.3.2, ufo@^1.5.4, ufo@^1.6.1: resolved "https://registry.yarnpkg.com/ufo/-/ufo-1.6.1.tgz#ac2db1d54614d1b22c1d603e3aef44a85d8f146b" integrity sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA== +ufo@^1.6.3: + version "1.6.3" + resolved "https://registry.yarnpkg.com/ufo/-/ufo-1.6.3.tgz#799666e4e88c122a9659805e30b9dc071c3aed4f" + integrity sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q== + uid-safe@2.1.5: version "2.1.5" resolved "https://registry.yarnpkg.com/uid-safe/-/uid-safe-2.1.5.tgz#2b3d5c7240e8fc2e58f8aa269e5ee49c0857bd3a" @@ -18470,6 +18576,14 @@ update-browserslist-db@^1.1.3: escalade "^3.2.0" picocolors "^1.1.1" +update-check@1.5.4: + version "1.5.4" + resolved "https://registry.yarnpkg.com/update-check/-/update-check-1.5.4.tgz#5b508e259558f1ad7dbc8b4b0457d4c9d28c8743" + integrity sha512-5YHsflzHP4t1G+8WGPlvKbJEbAJGCgw+Em+dGR1KmBUbr1J36SJBqlHLjR7oob7sco5hWHGQVcr9B2poIVDDTQ== + dependencies: + registry-auth-token "3.3.2" + registry-url "3.1.0" + update-notifier@7.3.1: version "7.3.1" resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-7.3.1.tgz#49af1ad6acfa0ea01c0d0f3c04047c154ead7096" diff --git a/boxes/boxes/react/package.json b/boxes/boxes/react/package.json index 02c2cdfcb03f..b74ac2c68439 100644 --- a/boxes/boxes/react/package.json +++ b/boxes/boxes/react/package.json @@ -38,7 +38,7 @@ "formik": "^2.4.3", "react": "^18.2.0", "react-dom": "^18.2.0", - "serve": "^14.2.1", + "serve": "^14.2.6", "yup": "^1.2.0" }, "devDependencies": { diff --git a/boxes/package.json b/boxes/package.json index fa22079088b0..03e4c87b41d1 100644 --- a/boxes/package.json +++ b/boxes/package.json @@ -49,13 +49,16 @@ "@aztec/sequencer-client": "link:../yarn-project/sequencer-client", "@aztec/p2p": "link:../yarn-project/p2p", 
"@aztec/wallets": "link:../yarn-project/wallets", - "@aztec/wallet-sdk": "link:../yarn-project/wallet-sdk" + "@aztec/wallet-sdk": "link:../yarn-project/wallet-sdk", + "rollup": "^4.59.0", + "node-forge": "^1.3.2", + "serialize-javascript": "^7.0.3" }, "dependencies": { "@inquirer/confirm": "^3.0.0", "@inquirer/input": "^2.0.0", "@inquirer/select": "^2.0.0", - "axios": "^1.12.0", + "axios": "^1.13.5", "commander": "^12.1.0", "ora": "^8.0.1", "pino": "^9.5.0", diff --git a/boxes/yarn.lock b/boxes/yarn.lock index 828a41beba7b..fb0e88f1b274 100644 --- a/boxes/yarn.lock +++ b/boxes/yarn.lock @@ -46,7 +46,7 @@ __metadata: "@inquirer/confirm": "npm:^3.0.0" "@inquirer/input": "npm:^2.0.0" "@inquirer/select": "npm:^2.0.0" - axios: "npm:^1.12.0" + axios: "npm:^1.13.5" commander: "npm:^12.1.0" ora: "npm:^8.0.1" pino: "npm:^9.5.0" @@ -1698,142 +1698,177 @@ __metadata: languageName: node linkType: hard -"@rollup/rollup-android-arm-eabi@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-android-arm-eabi@npm:4.41.1" +"@rollup/rollup-android-arm-eabi@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-android-arm-eabi@npm:4.59.0" conditions: os=android & cpu=arm languageName: node linkType: hard -"@rollup/rollup-android-arm64@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-android-arm64@npm:4.41.1" +"@rollup/rollup-android-arm64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-android-arm64@npm:4.59.0" conditions: os=android & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-darwin-arm64@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-darwin-arm64@npm:4.41.1" +"@rollup/rollup-darwin-arm64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-darwin-arm64@npm:4.59.0" conditions: os=darwin & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-darwin-x64@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-darwin-x64@npm:4.41.1" +"@rollup/rollup-darwin-x64@npm:4.59.0": + version: 4.59.0 + 
resolution: "@rollup/rollup-darwin-x64@npm:4.59.0" conditions: os=darwin & cpu=x64 languageName: node linkType: hard -"@rollup/rollup-freebsd-arm64@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-freebsd-arm64@npm:4.41.1" +"@rollup/rollup-freebsd-arm64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-freebsd-arm64@npm:4.59.0" conditions: os=freebsd & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-freebsd-x64@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-freebsd-x64@npm:4.41.1" +"@rollup/rollup-freebsd-x64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-freebsd-x64@npm:4.59.0" conditions: os=freebsd & cpu=x64 languageName: node linkType: hard -"@rollup/rollup-linux-arm-gnueabihf@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.41.1" +"@rollup/rollup-linux-arm-gnueabihf@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.59.0" conditions: os=linux & cpu=arm & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-arm-musleabihf@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.41.1" +"@rollup/rollup-linux-arm-musleabihf@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.59.0" conditions: os=linux & cpu=arm & libc=musl languageName: node linkType: hard -"@rollup/rollup-linux-arm64-gnu@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.41.1" +"@rollup/rollup-linux-arm64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.59.0" conditions: os=linux & cpu=arm64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-arm64-musl@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-linux-arm64-musl@npm:4.41.1" +"@rollup/rollup-linux-arm64-musl@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-arm64-musl@npm:4.59.0" conditions: os=linux & 
cpu=arm64 & libc=musl languageName: node linkType: hard -"@rollup/rollup-linux-loongarch64-gnu@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-linux-loongarch64-gnu@npm:4.41.1" +"@rollup/rollup-linux-loong64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-loong64-gnu@npm:4.59.0" conditions: os=linux & cpu=loong64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-powerpc64le-gnu@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-linux-powerpc64le-gnu@npm:4.41.1" +"@rollup/rollup-linux-loong64-musl@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-loong64-musl@npm:4.59.0" + conditions: os=linux & cpu=loong64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-linux-ppc64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-ppc64-gnu@npm:4.59.0" conditions: os=linux & cpu=ppc64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-riscv64-gnu@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.41.1" +"@rollup/rollup-linux-ppc64-musl@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-ppc64-musl@npm:4.59.0" + conditions: os=linux & cpu=ppc64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-linux-riscv64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.59.0" conditions: os=linux & cpu=riscv64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-riscv64-musl@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-linux-riscv64-musl@npm:4.41.1" +"@rollup/rollup-linux-riscv64-musl@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-riscv64-musl@npm:4.59.0" conditions: os=linux & cpu=riscv64 & libc=musl languageName: node linkType: hard -"@rollup/rollup-linux-s390x-gnu@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.41.1" 
+"@rollup/rollup-linux-s390x-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.59.0" conditions: os=linux & cpu=s390x & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-x64-gnu@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-linux-x64-gnu@npm:4.41.1" +"@rollup/rollup-linux-x64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-x64-gnu@npm:4.59.0" conditions: os=linux & cpu=x64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-x64-musl@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-linux-x64-musl@npm:4.41.1" +"@rollup/rollup-linux-x64-musl@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-x64-musl@npm:4.59.0" conditions: os=linux & cpu=x64 & libc=musl languageName: node linkType: hard -"@rollup/rollup-win32-arm64-msvc@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.41.1" +"@rollup/rollup-openbsd-x64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-openbsd-x64@npm:4.59.0" + conditions: os=openbsd & cpu=x64 + languageName: node + linkType: hard + +"@rollup/rollup-openharmony-arm64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-openharmony-arm64@npm:4.59.0" + conditions: os=openharmony & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-win32-arm64-msvc@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.59.0" conditions: os=win32 & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-win32-ia32-msvc@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.41.1" +"@rollup/rollup-win32-ia32-msvc@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.59.0" conditions: os=win32 & cpu=ia32 languageName: node linkType: hard -"@rollup/rollup-win32-x64-msvc@npm:4.41.1": - version: 4.41.1 - resolution: "@rollup/rollup-win32-x64-msvc@npm:4.41.1" 
+"@rollup/rollup-win32-x64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-win32-x64-gnu@npm:4.59.0" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"@rollup/rollup-win32-x64-msvc@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-win32-x64-msvc@npm:4.59.0" conditions: os=win32 & cpu=x64 languageName: node linkType: hard @@ -2138,13 +2173,20 @@ __metadata: languageName: node linkType: hard -"@types/estree@npm:*, @types/estree@npm:1.0.7, @types/estree@npm:^1.0.0, @types/estree@npm:^1.0.6": +"@types/estree@npm:*, @types/estree@npm:^1.0.0, @types/estree@npm:^1.0.6": version: 1.0.7 resolution: "@types/estree@npm:1.0.7" checksum: 10c0/be815254316882f7c40847336cd484c3bc1c3e34f710d197160d455dc9d6d050ffbf4c3bc76585dba86f737f020ab20bdb137ebe0e9116b0c86c7c0342221b8c languageName: node linkType: hard +"@types/estree@npm:1.0.8": + version: 1.0.8 + resolution: "@types/estree@npm:1.0.8" + checksum: 10c0/39d34d1afaa338ab9763f37ad6066e3f349444f9052b9676a7cc0252ef9485a41c6d81c9c4e0d26e9077993354edf25efc853f3224dd4b447175ef62bdcc86a5 + languageName: node + linkType: hard + "@types/express-serve-static-core@npm:*, @types/express-serve-static-core@npm:^5.0.0": version: 5.0.6 resolution: "@types/express-serve-static-core@npm:5.0.6" @@ -3342,7 +3384,7 @@ __metadata: languageName: node linkType: hard -"accepts@npm:~1.3.4, accepts@npm:~1.3.5, accepts@npm:~1.3.8": +"accepts@npm:~1.3.4, accepts@npm:~1.3.8": version: 1.3.8 resolution: "accepts@npm:1.3.8" dependencies: @@ -3420,15 +3462,15 @@ __metadata: languageName: node linkType: hard -"ajv@npm:8.12.0": - version: 8.12.0 - resolution: "ajv@npm:8.12.0" +"ajv@npm:8.18.0": + version: 8.18.0 + resolution: "ajv@npm:8.18.0" dependencies: - fast-deep-equal: "npm:^3.1.1" + fast-deep-equal: "npm:^3.1.3" + fast-uri: "npm:^3.0.1" json-schema-traverse: "npm:^1.0.0" require-from-string: "npm:^2.0.2" - uri-js: "npm:^4.2.2" - checksum: 
10c0/ac4f72adf727ee425e049bc9d8b31d4a57e1c90da8d28bcd23d60781b12fcd6fc3d68db5df16994c57b78b94eed7988f5a6b482fd376dc5b084125e20a0a622e + checksum: 10c0/e7517c426173513a07391be951879932bdf3348feaebd2199f5b901c20f99d60db8cd1591502d4d551dc82f594e82a05c4fe1c70139b15b8937f7afeaed9532f languageName: node linkType: hard @@ -3752,14 +3794,14 @@ __metadata: languageName: node linkType: hard -"axios@npm:^1.12.0": - version: 1.12.2 - resolution: "axios@npm:1.12.2" +"axios@npm:^1.13.5": + version: 1.13.6 + resolution: "axios@npm:1.13.6" dependencies: - follow-redirects: "npm:^1.15.6" - form-data: "npm:^4.0.4" + follow-redirects: "npm:^1.15.11" + form-data: "npm:^4.0.5" proxy-from-env: "npm:^1.1.0" - checksum: 10c0/80b063e318cf05cd33a4d991cea0162f3573481946f9129efb7766f38fde4c061c34f41a93a9f9521f02b7c9565ccbc197c099b0186543ac84a24580017adfed + checksum: 10c0/51fb5af055c3b85662fa97df17d986ae2c37d13bf86d50b6bb36b6b3a2dec6966a1d3a14ab3774b71707b155ae3597ed9b7babdf1a1a863d1a31840cb8e7ec71 languageName: node linkType: hard @@ -3798,7 +3840,7 @@ __metadata: react-dom: "npm:^18.2.0" react-toastify: "npm:^10.0.4" resolve-typescript-plugin: "npm:^2.0.1" - serve: "npm:^14.2.1" + serve: "npm:^14.2.6" stream-browserify: "npm:^3.0.0" style-loader: "npm:^3.3.3" ts-jest: "npm:^29.4.0" @@ -4065,6 +4107,15 @@ __metadata: languageName: node linkType: hard +"brace-expansion@npm:^2.0.2": + version: 2.0.2 + resolution: "brace-expansion@npm:2.0.2" + dependencies: + balanced-match: "npm:^1.0.0" + checksum: 10c0/6d117a4c793488af86b83172deb6af143e94c17bc53b0b3cec259733923b4ca84679d506ac261f4ba3c7ed37c46018e2ff442f9ce453af8643ecd64f4a54e6cf + languageName: node + linkType: hard + "braces@npm:^3.0.3, braces@npm:~3.0.2": version: 3.0.3 resolution: "braces@npm:3.0.3" @@ -4649,7 +4700,7 @@ __metadata: languageName: node linkType: hard -"compressible@npm:~2.0.16, compressible@npm:~2.0.18": +"compressible@npm:~2.0.18": version: 2.0.18 resolution: "compressible@npm:2.0.18" dependencies: @@ -4658,18 +4709,18 @@ 
__metadata: languageName: node linkType: hard -"compression@npm:1.7.4": - version: 1.7.4 - resolution: "compression@npm:1.7.4" +"compression@npm:1.8.1": + version: 1.8.1 + resolution: "compression@npm:1.8.1" dependencies: - accepts: "npm:~1.3.5" - bytes: "npm:3.0.0" - compressible: "npm:~2.0.16" + bytes: "npm:3.1.2" + compressible: "npm:~2.0.18" debug: "npm:2.6.9" - on-headers: "npm:~1.0.2" - safe-buffer: "npm:5.1.2" + negotiator: "npm:~0.6.4" + on-headers: "npm:~1.1.0" + safe-buffer: "npm:5.2.1" vary: "npm:~1.1.2" - checksum: 10c0/138db836202a406d8a14156a5564fb1700632a76b6e7d1546939472895a5304f2b23c80d7a22bf44c767e87a26e070dbc342ea63bb45ee9c863354fa5556bbbc + checksum: 10c0/85114b0b91c16594dc8c671cd9b05ef5e465066a60e5a4ed8b4551661303559a896ed17bb72c4234c04064e078f6ca86a34b8690349499a43f6fc4b844475da4 languageName: node linkType: hard @@ -6400,7 +6451,7 @@ __metadata: languageName: node linkType: hard -"follow-redirects@npm:^1.0.0, follow-redirects@npm:^1.15.6": +"follow-redirects@npm:^1.0.0": version: 1.15.9 resolution: "follow-redirects@npm:1.15.9" peerDependenciesMeta: @@ -6410,6 +6461,16 @@ __metadata: languageName: node linkType: hard +"follow-redirects@npm:^1.15.11": + version: 1.15.11 + resolution: "follow-redirects@npm:1.15.11" + peerDependenciesMeta: + debug: + optional: true + checksum: 10c0/d301f430542520a54058d4aeeb453233c564aaccac835d29d15e050beb33f339ad67d9bddbce01739c5dc46a6716dbe3d9d0d5134b1ca203effa11a7ef092343 + languageName: node + linkType: hard + "for-each@npm:^0.3.3, for-each@npm:^0.3.5": version: 0.3.5 resolution: "for-each@npm:0.3.5" @@ -6429,16 +6490,16 @@ __metadata: languageName: node linkType: hard -"form-data@npm:^4.0.4": - version: 4.0.4 - resolution: "form-data@npm:4.0.4" +"form-data@npm:^4.0.5": + version: 4.0.5 + resolution: "form-data@npm:4.0.5" dependencies: asynckit: "npm:^0.4.0" combined-stream: "npm:^1.0.8" es-set-tostringtag: "npm:^2.1.0" hasown: "npm:^2.0.2" mime-types: "npm:^2.1.12" - checksum: 
10c0/373525a9a034b9d57073e55eab79e501a714ffac02e7a9b01be1c820780652b16e4101819785e1e18f8d98f0aee866cc654d660a435c378e16a72f2e7cac9695 + checksum: 10c0/dd6b767ee0bbd6d84039db12a0fa5a2028160ffbfaba1800695713b46ae974a5f6e08b3356c3195137f8530dcd9dfcb5d5ae1eeff53d0db1e5aad863b619ce3b languageName: node linkType: hard @@ -8754,12 +8815,12 @@ __metadata: languageName: node linkType: hard -"minimatch@npm:3.1.2, minimatch@npm:^3.0.4, minimatch@npm:^3.1.1, minimatch@npm:^3.1.2": - version: 3.1.2 - resolution: "minimatch@npm:3.1.2" +"minimatch@npm:3.1.5, minimatch@npm:^3.0.4, minimatch@npm:^3.1.1, minimatch@npm:^3.1.2": + version: 3.1.5 + resolution: "minimatch@npm:3.1.5" dependencies: brace-expansion: "npm:^1.1.7" - checksum: 10c0/0262810a8fc2e72cca45d6fd86bd349eee435eb95ac6aa45c9ea2180e7ee875ef44c32b55b5973ceabe95ea12682f6e3725cbb63d7a2d1da3ae1163c8b210311 + checksum: 10c0/2ecbdc0d33f07bddb0315a8b5afbcb761307a8778b48f0b312418ccbced99f104a2d17d8aca7573433c70e8ccd1c56823a441897a45e384ea76ef401a26ace70 languageName: node linkType: hard @@ -8773,20 +8834,20 @@ __metadata: linkType: hard "minimatch@npm:^5.0.1": - version: 5.1.6 - resolution: "minimatch@npm:5.1.6" + version: 5.1.9 + resolution: "minimatch@npm:5.1.9" dependencies: brace-expansion: "npm:^2.0.1" - checksum: 10c0/3defdfd230914f22a8da203747c42ee3c405c39d4d37ffda284dac5e45b7e1f6c49aa8be606509002898e73091ff2a3bbfc59c2c6c71d4660609f63aa92f98e3 + checksum: 10c0/4202718683815a7288b13e470160a4f9560cf392adef4f453927505817e01ef6b3476ecde13cfcaed17e7326dd3b69ad44eb2daeb19a217c5500f9277893f1d6 languageName: node linkType: hard "minimatch@npm:^9.0.4": - version: 9.0.5 - resolution: "minimatch@npm:9.0.5" + version: 9.0.9 + resolution: "minimatch@npm:9.0.9" dependencies: - brace-expansion: "npm:^2.0.1" - checksum: 10c0/de96cf5e35bdf0eab3e2c853522f98ffbe9a36c37797778d2665231ec1f20a9447a7e567cb640901f89e4daaa95ae5d70c65a9e8aa2bb0019b6facbc3c0575ed + brace-expansion: "npm:^2.0.2" + checksum: 
10c0/0b6a58530dbb00361745aa6c8cffaba4c90f551afe7c734830bd95fd88ebf469dd7355a027824ea1d09e37181cfeb0a797fb17df60c15ac174303ac110eb7e86 languageName: node linkType: hard @@ -9011,10 +9072,10 @@ __metadata: languageName: node linkType: hard -"node-forge@npm:^1": - version: 1.3.1 - resolution: "node-forge@npm:1.3.1" - checksum: 10c0/e882819b251a4321f9fc1d67c85d1501d3004b4ee889af822fd07f64de3d1a8e272ff00b689570af0465d65d6bf5074df9c76e900e0aff23e60b847f2a46fbe8 +"node-forge@npm:^1.3.2": + version: 1.3.3 + resolution: "node-forge@npm:1.3.3" + checksum: 10c0/9c6f53b0ebb34865872cf62a35b0aef8fb337e2efc766626c2e3a0040f4c02933bf29a62ba999eb44a2aca73bd512c4eda22705a47b94654b9fb8ed53db9a1db languageName: node linkType: hard @@ -9233,6 +9294,13 @@ __metadata: languageName: node linkType: hard +"on-headers@npm:~1.1.0": + version: 1.1.0 + resolution: "on-headers@npm:1.1.0" + checksum: 10c0/2c3b6b0d68ec9adbd561dc2d61c9b14da8ac03d8a2f0fd9e97bdf0600c887d5d97f664ff3be6876cf40cda6e3c587d73a4745e10b426ac50c7664fc5a0dfc0a1 + languageName: node + linkType: hard + "once@npm:^1.3.0, once@npm:^1.3.1, once@npm:^1.4.0": version: 1.4.0 resolution: "once@npm:1.4.0" @@ -10381,31 +10449,36 @@ __metadata: languageName: node linkType: hard -"rollup@npm:^4.20.0, rollup@npm:^4.34.9": - version: 4.41.1 - resolution: "rollup@npm:4.41.1" - dependencies: - "@rollup/rollup-android-arm-eabi": "npm:4.41.1" - "@rollup/rollup-android-arm64": "npm:4.41.1" - "@rollup/rollup-darwin-arm64": "npm:4.41.1" - "@rollup/rollup-darwin-x64": "npm:4.41.1" - "@rollup/rollup-freebsd-arm64": "npm:4.41.1" - "@rollup/rollup-freebsd-x64": "npm:4.41.1" - "@rollup/rollup-linux-arm-gnueabihf": "npm:4.41.1" - "@rollup/rollup-linux-arm-musleabihf": "npm:4.41.1" - "@rollup/rollup-linux-arm64-gnu": "npm:4.41.1" - "@rollup/rollup-linux-arm64-musl": "npm:4.41.1" - "@rollup/rollup-linux-loongarch64-gnu": "npm:4.41.1" - "@rollup/rollup-linux-powerpc64le-gnu": "npm:4.41.1" - "@rollup/rollup-linux-riscv64-gnu": "npm:4.41.1" - 
"@rollup/rollup-linux-riscv64-musl": "npm:4.41.1" - "@rollup/rollup-linux-s390x-gnu": "npm:4.41.1" - "@rollup/rollup-linux-x64-gnu": "npm:4.41.1" - "@rollup/rollup-linux-x64-musl": "npm:4.41.1" - "@rollup/rollup-win32-arm64-msvc": "npm:4.41.1" - "@rollup/rollup-win32-ia32-msvc": "npm:4.41.1" - "@rollup/rollup-win32-x64-msvc": "npm:4.41.1" - "@types/estree": "npm:1.0.7" +"rollup@npm:^4.59.0": + version: 4.59.0 + resolution: "rollup@npm:4.59.0" + dependencies: + "@rollup/rollup-android-arm-eabi": "npm:4.59.0" + "@rollup/rollup-android-arm64": "npm:4.59.0" + "@rollup/rollup-darwin-arm64": "npm:4.59.0" + "@rollup/rollup-darwin-x64": "npm:4.59.0" + "@rollup/rollup-freebsd-arm64": "npm:4.59.0" + "@rollup/rollup-freebsd-x64": "npm:4.59.0" + "@rollup/rollup-linux-arm-gnueabihf": "npm:4.59.0" + "@rollup/rollup-linux-arm-musleabihf": "npm:4.59.0" + "@rollup/rollup-linux-arm64-gnu": "npm:4.59.0" + "@rollup/rollup-linux-arm64-musl": "npm:4.59.0" + "@rollup/rollup-linux-loong64-gnu": "npm:4.59.0" + "@rollup/rollup-linux-loong64-musl": "npm:4.59.0" + "@rollup/rollup-linux-ppc64-gnu": "npm:4.59.0" + "@rollup/rollup-linux-ppc64-musl": "npm:4.59.0" + "@rollup/rollup-linux-riscv64-gnu": "npm:4.59.0" + "@rollup/rollup-linux-riscv64-musl": "npm:4.59.0" + "@rollup/rollup-linux-s390x-gnu": "npm:4.59.0" + "@rollup/rollup-linux-x64-gnu": "npm:4.59.0" + "@rollup/rollup-linux-x64-musl": "npm:4.59.0" + "@rollup/rollup-openbsd-x64": "npm:4.59.0" + "@rollup/rollup-openharmony-arm64": "npm:4.59.0" + "@rollup/rollup-win32-arm64-msvc": "npm:4.59.0" + "@rollup/rollup-win32-ia32-msvc": "npm:4.59.0" + "@rollup/rollup-win32-x64-gnu": "npm:4.59.0" + "@rollup/rollup-win32-x64-msvc": "npm:4.59.0" + "@types/estree": "npm:1.0.8" fsevents: "npm:~2.3.2" dependenciesMeta: "@rollup/rollup-android-arm-eabi": @@ -10428,9 +10501,13 @@ __metadata: optional: true "@rollup/rollup-linux-arm64-musl": optional: true - "@rollup/rollup-linux-loongarch64-gnu": + "@rollup/rollup-linux-loong64-gnu": + optional: true + 
"@rollup/rollup-linux-loong64-musl": optional: true - "@rollup/rollup-linux-powerpc64le-gnu": + "@rollup/rollup-linux-ppc64-gnu": + optional: true + "@rollup/rollup-linux-ppc64-musl": optional: true "@rollup/rollup-linux-riscv64-gnu": optional: true @@ -10442,17 +10519,23 @@ __metadata: optional: true "@rollup/rollup-linux-x64-musl": optional: true + "@rollup/rollup-openbsd-x64": + optional: true + "@rollup/rollup-openharmony-arm64": + optional: true "@rollup/rollup-win32-arm64-msvc": optional: true "@rollup/rollup-win32-ia32-msvc": optional: true + "@rollup/rollup-win32-x64-gnu": + optional: true "@rollup/rollup-win32-x64-msvc": optional: true fsevents: optional: true bin: rollup: dist/bin/rollup - checksum: 10c0/c4d5f2257320b50dc0e035e31d8d2f78d36b7015aef2f87cc984c0a1c97ffebf14337dddeb488b4b11ae798fea6486189b77e7cf677617dcf611d97db41ebfda + checksum: 10c0/f38742da34cfee5e899302615fa157aa77cb6a2a1495e5e3ce4cc9c540d3262e235bbe60caa31562bbfe492b01fdb3e7a8c43c39d842d3293bcf843123b766fc languageName: node linkType: hard @@ -10485,13 +10568,6 @@ __metadata: languageName: node linkType: hard -"safe-buffer@npm:5.1.2, safe-buffer@npm:~5.1.0, safe-buffer@npm:~5.1.1": - version: 5.1.2 - resolution: "safe-buffer@npm:5.1.2" - checksum: 10c0/780ba6b5d99cc9a40f7b951d47152297d0e260f0df01472a1b99d4889679a4b94a13d644f7dbc4f022572f09ae9005fa2fbb93bbbd83643316f365a3e9a45b21 - languageName: node - linkType: hard - "safe-buffer@npm:5.2.1, safe-buffer@npm:>=5.1.0, safe-buffer@npm:^5.0.1, safe-buffer@npm:^5.1.0, safe-buffer@npm:^5.1.1, safe-buffer@npm:^5.1.2, safe-buffer@npm:^5.2.0, safe-buffer@npm:^5.2.1, safe-buffer@npm:~5.2.0": version: 5.2.1 resolution: "safe-buffer@npm:5.2.1" @@ -10499,6 +10575,13 @@ __metadata: languageName: node linkType: hard +"safe-buffer@npm:~5.1.0, safe-buffer@npm:~5.1.1": + version: 5.1.2 + resolution: "safe-buffer@npm:5.1.2" + checksum: 
10c0/780ba6b5d99cc9a40f7b951d47152297d0e260f0df01472a1b99d4889679a4b94a13d644f7dbc4f022572f09ae9005fa2fbb93bbbd83643316f365a3e9a45b21 + languageName: node + linkType: hard + "safe-push-apply@npm:^1.0.0": version: 1.0.0 resolution: "safe-push-apply@npm:1.0.0" @@ -10618,27 +10701,25 @@ __metadata: languageName: node linkType: hard -"serialize-javascript@npm:^6.0.2": - version: 6.0.2 - resolution: "serialize-javascript@npm:6.0.2" - dependencies: - randombytes: "npm:^2.1.0" - checksum: 10c0/2dd09ef4b65a1289ba24a788b1423a035581bef60817bea1f01eda8e3bda623f86357665fe7ac1b50f6d4f583f97db9615b3f07b2a2e8cbcb75033965f771dd2 +"serialize-javascript@npm:^7.0.3": + version: 7.0.4 + resolution: "serialize-javascript@npm:7.0.4" + checksum: 10c0/f3da6f994c41306fbfabb55eefe280a46da05592939a84b0d95c84e296c92ba9e6a3d86cf7bbd71e7a59e1cfcd8481745910af109bedbd3ed853b444d32f9ee9 languageName: node linkType: hard -"serve-handler@npm:6.1.6": - version: 6.1.6 - resolution: "serve-handler@npm:6.1.6" +"serve-handler@npm:6.1.7": + version: 6.1.7 + resolution: "serve-handler@npm:6.1.7" dependencies: bytes: "npm:3.0.0" content-disposition: "npm:0.5.2" mime-types: "npm:2.1.18" - minimatch: "npm:3.1.2" + minimatch: "npm:3.1.5" path-is-inside: "npm:1.0.2" path-to-regexp: "npm:3.3.0" range-parser: "npm:1.2.0" - checksum: 10c0/1e1cb6bbc51ee32bc1505f2e0605bdc2e96605c522277c977b67f83be9d66bd1eec8604388714a4d728e036d86b629bc9aec02120ea030d3d2c3899d44696503 + checksum: 10c0/35afb68d81afd3c38d15792a5bc2451915b739bef2898a47ebd190db6a4e29846530ac00292b8008fe7297a819257c3948be2deaf4ffd32c96689e8947cf0ae9 languageName: node linkType: hard @@ -10669,24 +10750,24 @@ __metadata: languageName: node linkType: hard -"serve@npm:^14.2.1": - version: 14.2.4 - resolution: "serve@npm:14.2.4" +"serve@npm:^14.2.6": + version: 14.2.6 + resolution: "serve@npm:14.2.6" dependencies: "@zeit/schemas": "npm:2.36.0" - ajv: "npm:8.12.0" + ajv: "npm:8.18.0" arg: "npm:5.0.2" boxen: "npm:7.0.0" chalk: "npm:5.0.1" chalk-template: 
"npm:0.4.0" clipboardy: "npm:3.0.0" - compression: "npm:1.7.4" + compression: "npm:1.8.1" is-port-reachable: "npm:4.0.0" - serve-handler: "npm:6.1.6" + serve-handler: "npm:6.1.7" update-check: "npm:1.5.4" bin: serve: build/main.js - checksum: 10c0/93abecd6214228d529065040f7c0cbe541c1cc321c6a94b8a968f45a519bd9c46a9fd5e45a9b24a1f5736c5b547b8fa60d5414ebc78f870e29431b64165c1d06 + checksum: 10c0/7e1668e0d187719dbe4f3de967012ce2263c967f6135d9c630f803b0f173334e1442ab326fcc4c8e6cd4e293d8bd8c773aebab2746ecaa0fb1ab29a36079763b languageName: node linkType: hard diff --git a/docs/package.json b/docs/package.json index 405e44c4837d..07ee97420804 100644 --- a/docs/package.json +++ b/docs/package.json @@ -91,7 +91,12 @@ "resolutions": { "tar-fs": "^3.1.1", "ws@npm:8.13.0": "npm:8.17.1", - "path-to-regexp@npm:^1.7.0": "npm:1.9.0" + "path-to-regexp@npm:^1.7.0": "npm:1.9.0", + "h3": "^1.15.5", + "node-forge": "^1.3.2", + "serve-handler": "^6.1.7", + "serialize-javascript": "^7.0.3", + "axios": "^1.13.5" }, "packageManager": "yarn@4.5.2" } diff --git a/docs/yarn.lock b/docs/yarn.lock index ed4abf35cf76..cadc2c58e266 100644 --- a/docs/yarn.lock +++ b/docs/yarn.lock @@ -9576,14 +9576,14 @@ __metadata: languageName: node linkType: hard -"axios@npm:^1.6.0, axios@npm:^1.6.8": - version: 1.12.2 - resolution: "axios@npm:1.12.2" +"axios@npm:^1.13.5": + version: 1.13.6 + resolution: "axios@npm:1.13.6" dependencies: - follow-redirects: "npm:^1.15.6" - form-data: "npm:^4.0.4" + follow-redirects: "npm:^1.15.11" + form-data: "npm:^4.0.5" proxy-from-env: "npm:^1.1.0" - checksum: 10c0/80b063e318cf05cd33a4d991cea0162f3573481946f9129efb7766f38fde4c061c34f41a93a9f9521f02b7c9565ccbc197c099b0186543ac84a24580017adfed + checksum: 10c0/51fb5af055c3b85662fa97df17d986ae2c37d13bf86d50b6bb36b6b3a2dec6966a1d3a14ab3774b71707b155ae3597ed9b7babdf1a1a863d1a31840cb8e7ec71 languageName: node linkType: hard @@ -9980,6 +9980,15 @@ __metadata: languageName: node linkType: hard +"brace-expansion@npm:^2.0.2": + version: 
2.0.2 + resolution: "brace-expansion@npm:2.0.2" + dependencies: + balanced-match: "npm:^1.0.0" + checksum: 10c0/6d117a4c793488af86b83172deb6af143e94c17bc53b0b3cec259733923b4ca84679d506ac261f4ba3c7ed37c46018e2ff442f9ce453af8643ecd64f4a54e6cf + languageName: node + linkType: hard + "braces@npm:^3.0.3, braces@npm:~3.0.2": version: 3.0.3 resolution: "braces@npm:3.0.3" @@ -14010,7 +14019,7 @@ __metadata: languageName: node linkType: hard -"follow-redirects@npm:^1.0.0, follow-redirects@npm:^1.12.1, follow-redirects@npm:^1.15.6": +"follow-redirects@npm:^1.0.0, follow-redirects@npm:^1.12.1": version: 1.15.6 resolution: "follow-redirects@npm:1.15.6" peerDependenciesMeta: @@ -14020,6 +14029,16 @@ __metadata: languageName: node linkType: hard +"follow-redirects@npm:^1.15.11": + version: 1.15.11 + resolution: "follow-redirects@npm:1.15.11" + peerDependenciesMeta: + debug: + optional: true + checksum: 10c0/d301f430542520a54058d4aeeb453233c564aaccac835d29d15e050beb33f339ad67d9bddbce01739c5dc46a6716dbe3d9d0d5134b1ca203effa11a7ef092343 + languageName: node + linkType: hard + "foreground-child@npm:^3.1.0": version: 3.1.1 resolution: "foreground-child@npm:3.1.1" @@ -14037,16 +14056,16 @@ __metadata: languageName: node linkType: hard -"form-data@npm:^4.0.4": - version: 4.0.4 - resolution: "form-data@npm:4.0.4" +"form-data@npm:^4.0.5": + version: 4.0.5 + resolution: "form-data@npm:4.0.5" dependencies: asynckit: "npm:^0.4.0" combined-stream: "npm:^1.0.8" es-set-tostringtag: "npm:^2.1.0" hasown: "npm:^2.0.2" mime-types: "npm:^2.1.12" - checksum: 10c0/373525a9a034b9d57073e55eab79e501a714ffac02e7a9b01be1c820780652b16e4101819785e1e18f8d98f0aee866cc654d660a435c378e16a72f2e7cac9695 + checksum: 10c0/dd6b767ee0bbd6d84039db12a0fa5a2028160ffbfaba1800695713b46ae974a5f6e08b3356c3195137f8530dcd9dfcb5d5ae1eeff53d0db1e5aad863b619ce3b languageName: node linkType: hard @@ -14594,20 +14613,20 @@ __metadata: languageName: node linkType: hard -"h3@npm:^1.12.0, h3@npm:^1.15.3, h3@npm:^1.15.4": - version: 
1.15.4 - resolution: "h3@npm:1.15.4" +"h3@npm:^1.15.5": + version: 1.15.5 + resolution: "h3@npm:1.15.5" dependencies: cookie-es: "npm:^1.2.2" crossws: "npm:^0.3.5" defu: "npm:^6.1.4" destr: "npm:^2.0.5" iron-webcrypto: "npm:^1.2.1" - node-mock-http: "npm:^1.0.2" + node-mock-http: "npm:^1.0.4" radix3: "npm:^1.1.2" - ufo: "npm:^1.6.1" + ufo: "npm:^1.6.3" uncrypto: "npm:^0.1.3" - checksum: 10c0/5182a722d01fe18af5cb62441aaa872b630f4e1ac2cf1782e1f442e65fdfddb85eb6723bf73a96184c2dc1f1e3771d713ef47c456a9a4e92c640b025ba91044c + checksum: 10c0/d36c05176555109aa0b42c520dc03350d5baa9fff5067075f0919920a80f966a53eff2785051203a4630f8472bec118e5e0187b186a3105eba3106087cb0ddb9 languageName: node linkType: hard @@ -18120,30 +18139,30 @@ __metadata: languageName: node linkType: hard -"minimatch@npm:3.1.2, minimatch@npm:^3.0.5, minimatch@npm:^3.1.1, minimatch@npm:^3.1.2": - version: 3.1.2 - resolution: "minimatch@npm:3.1.2" +"minimatch@npm:3.1.5, minimatch@npm:^3.0.5, minimatch@npm:^3.1.1, minimatch@npm:^3.1.2": + version: 3.1.5 + resolution: "minimatch@npm:3.1.5" dependencies: brace-expansion: "npm:^1.1.7" - checksum: 10c0/0262810a8fc2e72cca45d6fd86bd349eee435eb95ac6aa45c9ea2180e7ee875ef44c32b55b5973ceabe95ea12682f6e3725cbb63d7a2d1da3ae1163c8b210311 + checksum: 10c0/2ecbdc0d33f07bddb0315a8b5afbcb761307a8778b48f0b312418ccbced99f104a2d17d8aca7573433c70e8ccd1c56823a441897a45e384ea76ef401a26ace70 languageName: node linkType: hard "minimatch@npm:^5.1.0": - version: 5.1.6 - resolution: "minimatch@npm:5.1.6" + version: 5.1.9 + resolution: "minimatch@npm:5.1.9" dependencies: brace-expansion: "npm:^2.0.1" - checksum: 10c0/3defdfd230914f22a8da203747c42ee3c405c39d4d37ffda284dac5e45b7e1f6c49aa8be606509002898e73091ff2a3bbfc59c2c6c71d4660609f63aa92f98e3 + checksum: 10c0/4202718683815a7288b13e470160a4f9560cf392adef4f453927505817e01ef6b3476ecde13cfcaed17e7326dd3b69ad44eb2daeb19a217c5500f9277893f1d6 languageName: node linkType: hard "minimatch@npm:^9.0.0, minimatch@npm:^9.0.3, minimatch@npm:^9.0.4": 
- version: 9.0.5 - resolution: "minimatch@npm:9.0.5" + version: 9.0.9 + resolution: "minimatch@npm:9.0.9" dependencies: - brace-expansion: "npm:^2.0.1" - checksum: 10c0/de96cf5e35bdf0eab3e2c853522f98ffbe9a36c37797778d2665231ec1f20a9447a7e567cb640901f89e4daaa95ae5d70c65a9e8aa2bb0019b6facbc3c0575ed + brace-expansion: "npm:^2.0.2" + checksum: 10c0/0b6a58530dbb00361745aa6c8cffaba4c90f551afe7c734830bd95fd88ebf469dd7355a027824ea1d09e37181cfeb0a797fb17df60c15ac174303ac110eb7e86 languageName: node linkType: hard @@ -18618,10 +18637,10 @@ __metadata: languageName: node linkType: hard -"node-forge@npm:^1, node-forge@npm:^1.3.1": - version: 1.3.1 - resolution: "node-forge@npm:1.3.1" - checksum: 10c0/e882819b251a4321f9fc1d67c85d1501d3004b4ee889af822fd07f64de3d1a8e272ff00b689570af0465d65d6bf5074df9c76e900e0aff23e60b847f2a46fbe8 +"node-forge@npm:^1.3.2": + version: 1.3.3 + resolution: "node-forge@npm:1.3.3" + checksum: 10c0/9c6f53b0ebb34865872cf62a35b0aef8fb337e2efc766626c2e3a0040f4c02933bf29a62ba999eb44a2aca73bd512c4eda22705a47b94654b9fb8ed53db9a1db languageName: node linkType: hard @@ -18656,10 +18675,10 @@ __metadata: languageName: node linkType: hard -"node-mock-http@npm:^1.0.2": - version: 1.0.3 - resolution: "node-mock-http@npm:1.0.3" - checksum: 10c0/663f2a13518fc89b0dc69f96ba4442b5d1ecbbf20a833283725c8d2d92286af1b634803822432985be5999317fd5f23edbf2a62335fe6dd38d6b19dd7b107559 +"node-mock-http@npm:^1.0.4": + version: 1.0.4 + resolution: "node-mock-http@npm:1.0.4" + checksum: 10c0/86e3f7453cf07ad6b8bd17cf89ff91d45f486a861cf6d891618cf29647d559cbcde1d1f90c9cc02e014ff9f7900b2fb21c96b03ea4b4a415dbe2d65badadceba languageName: node linkType: hard @@ -20869,15 +20888,6 @@ __metadata: languageName: node linkType: hard -"randombytes@npm:^2.1.0": - version: 2.1.0 - resolution: "randombytes@npm:2.1.0" - dependencies: - safe-buffer: "npm:^5.1.0" - checksum: 
10c0/50395efda7a8c94f5dffab564f9ff89736064d32addf0cc7e8bf5e4166f09f8ded7a0849ca6c2d2a59478f7d90f78f20d8048bca3cdf8be09d8e8a10790388f3 - languageName: node - linkType: hard - "range-parser@npm:1.2.0": version: 1.2.0 resolution: "range-parser@npm:1.2.0" @@ -21956,7 +21966,7 @@ __metadata: languageName: node linkType: hard -"safe-buffer@npm:5.2.1, safe-buffer@npm:>=5.1.0, safe-buffer@npm:^5.0.1, safe-buffer@npm:^5.1.0, safe-buffer@npm:~5.2.0": +"safe-buffer@npm:5.2.1, safe-buffer@npm:>=5.1.0, safe-buffer@npm:^5.0.1, safe-buffer@npm:~5.2.0": version: 5.2.1 resolution: "safe-buffer@npm:5.2.1" checksum: 10c0/6501914237c0a86e9675d4e51d89ca3c21ffd6a31642efeba25ad65720bce6921c9e7e974e5be91a786b25aa058b5303285d3c15dbabf983a919f5f630d349f3 @@ -22154,27 +22164,25 @@ __metadata: languageName: node linkType: hard -"serialize-javascript@npm:^6.0.0, serialize-javascript@npm:^6.0.1": - version: 6.0.2 - resolution: "serialize-javascript@npm:6.0.2" - dependencies: - randombytes: "npm:^2.1.0" - checksum: 10c0/2dd09ef4b65a1289ba24a788b1423a035581bef60817bea1f01eda8e3bda623f86357665fe7ac1b50f6d4f583f97db9615b3f07b2a2e8cbcb75033965f771dd2 +"serialize-javascript@npm:^7.0.3": + version: 7.0.4 + resolution: "serialize-javascript@npm:7.0.4" + checksum: 10c0/f3da6f994c41306fbfabb55eefe280a46da05592939a84b0d95c84e296c92ba9e6a3d86cf7bbd71e7a59e1cfcd8481745910af109bedbd3ed853b444d32f9ee9 languageName: node linkType: hard -"serve-handler@npm:^6.1.6": - version: 6.1.6 - resolution: "serve-handler@npm:6.1.6" +"serve-handler@npm:^6.1.7": + version: 6.1.7 + resolution: "serve-handler@npm:6.1.7" dependencies: bytes: "npm:3.0.0" content-disposition: "npm:0.5.2" mime-types: "npm:2.1.18" - minimatch: "npm:3.1.2" + minimatch: "npm:3.1.5" path-is-inside: "npm:1.0.2" path-to-regexp: "npm:3.3.0" range-parser: "npm:1.2.0" - checksum: 10c0/1e1cb6bbc51ee32bc1505f2e0605bdc2e96605c522277c977b67f83be9d66bd1eec8604388714a4d728e036d86b629bc9aec02120ea030d3d2c3899d44696503 + checksum: 
10c0/35afb68d81afd3c38d15792a5bc2451915b739bef2898a47ebd190db6a4e29846530ac00292b8008fe7297a819257c3948be2deaf4ffd32c96689e8947cf0ae9 languageName: node linkType: hard @@ -23788,6 +23796,13 @@ __metadata: languageName: node linkType: hard +"ufo@npm:^1.6.3": + version: 1.6.3 + resolution: "ufo@npm:1.6.3" + checksum: 10c0/bf0e4ebff99e54da1b9c7182ac2f40475988b41faa881d579bc97bc2a0509672107b0a0e94c4b8d31a0ab8c4bf07f4aa0b469ac6da8536d56bda5b085ea2e953 + languageName: node + linkType: hard + "uid-safe@npm:2.1.5": version: 2.1.5 resolution: "uid-safe@npm:2.1.5" diff --git a/playground/package.json b/playground/package.json index 0cda90a8c0b0..6dbf4f3fa98a 100644 --- a/playground/package.json +++ b/playground/package.json @@ -60,5 +60,8 @@ "vite": "^7.1.4", "vite-plugin-node-polyfills": "^0.24.0", "vite-plugin-static-copy": "^3.1.2" + }, + "resolutions": { + "rollup": "^4.59.0" } } diff --git a/playground/yarn.lock b/playground/yarn.lock index 49f5556b80b9..b35c390cd3df 100644 --- a/playground/yarn.lock +++ b/playground/yarn.lock @@ -1202,149 +1202,177 @@ __metadata: languageName: node linkType: hard -"@rollup/rollup-android-arm-eabi@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-android-arm-eabi@npm:4.50.1" +"@rollup/rollup-android-arm-eabi@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-android-arm-eabi@npm:4.59.0" conditions: os=android & cpu=arm languageName: node linkType: hard -"@rollup/rollup-android-arm64@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-android-arm64@npm:4.50.1" +"@rollup/rollup-android-arm64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-android-arm64@npm:4.59.0" conditions: os=android & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-darwin-arm64@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-darwin-arm64@npm:4.50.1" +"@rollup/rollup-darwin-arm64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-darwin-arm64@npm:4.59.0" conditions: os=darwin & cpu=arm64 
languageName: node linkType: hard -"@rollup/rollup-darwin-x64@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-darwin-x64@npm:4.50.1" +"@rollup/rollup-darwin-x64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-darwin-x64@npm:4.59.0" conditions: os=darwin & cpu=x64 languageName: node linkType: hard -"@rollup/rollup-freebsd-arm64@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-freebsd-arm64@npm:4.50.1" +"@rollup/rollup-freebsd-arm64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-freebsd-arm64@npm:4.59.0" conditions: os=freebsd & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-freebsd-x64@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-freebsd-x64@npm:4.50.1" +"@rollup/rollup-freebsd-x64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-freebsd-x64@npm:4.59.0" conditions: os=freebsd & cpu=x64 languageName: node linkType: hard -"@rollup/rollup-linux-arm-gnueabihf@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.50.1" +"@rollup/rollup-linux-arm-gnueabihf@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.59.0" conditions: os=linux & cpu=arm & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-arm-musleabihf@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.50.1" +"@rollup/rollup-linux-arm-musleabihf@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.59.0" conditions: os=linux & cpu=arm & libc=musl languageName: node linkType: hard -"@rollup/rollup-linux-arm64-gnu@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.50.1" +"@rollup/rollup-linux-arm64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.59.0" conditions: os=linux & cpu=arm64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-arm64-musl@npm:4.50.1": - version: 4.50.1 - 
resolution: "@rollup/rollup-linux-arm64-musl@npm:4.50.1" +"@rollup/rollup-linux-arm64-musl@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-arm64-musl@npm:4.59.0" conditions: os=linux & cpu=arm64 & libc=musl languageName: node linkType: hard -"@rollup/rollup-linux-loongarch64-gnu@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-linux-loongarch64-gnu@npm:4.50.1" +"@rollup/rollup-linux-loong64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-loong64-gnu@npm:4.59.0" conditions: os=linux & cpu=loong64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-ppc64-gnu@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-linux-ppc64-gnu@npm:4.50.1" +"@rollup/rollup-linux-loong64-musl@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-loong64-musl@npm:4.59.0" + conditions: os=linux & cpu=loong64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-linux-ppc64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-ppc64-gnu@npm:4.59.0" conditions: os=linux & cpu=ppc64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-riscv64-gnu@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.50.1" +"@rollup/rollup-linux-ppc64-musl@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-ppc64-musl@npm:4.59.0" + conditions: os=linux & cpu=ppc64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-linux-riscv64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.59.0" conditions: os=linux & cpu=riscv64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-riscv64-musl@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-linux-riscv64-musl@npm:4.50.1" +"@rollup/rollup-linux-riscv64-musl@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-riscv64-musl@npm:4.59.0" conditions: os=linux & cpu=riscv64 & libc=musl 
languageName: node linkType: hard -"@rollup/rollup-linux-s390x-gnu@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.50.1" +"@rollup/rollup-linux-s390x-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.59.0" conditions: os=linux & cpu=s390x & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-x64-gnu@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-linux-x64-gnu@npm:4.50.1" +"@rollup/rollup-linux-x64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-x64-gnu@npm:4.59.0" conditions: os=linux & cpu=x64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-x64-musl@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-linux-x64-musl@npm:4.50.1" +"@rollup/rollup-linux-x64-musl@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-x64-musl@npm:4.59.0" conditions: os=linux & cpu=x64 & libc=musl languageName: node linkType: hard -"@rollup/rollup-openharmony-arm64@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-openharmony-arm64@npm:4.50.1" +"@rollup/rollup-openbsd-x64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-openbsd-x64@npm:4.59.0" + conditions: os=openbsd & cpu=x64 + languageName: node + linkType: hard + +"@rollup/rollup-openharmony-arm64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-openharmony-arm64@npm:4.59.0" conditions: os=openharmony & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-win32-arm64-msvc@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.50.1" +"@rollup/rollup-win32-arm64-msvc@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.59.0" conditions: os=win32 & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-win32-ia32-msvc@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.50.1" +"@rollup/rollup-win32-ia32-msvc@npm:4.59.0": + version: 
4.59.0 + resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.59.0" conditions: os=win32 & cpu=ia32 languageName: node linkType: hard -"@rollup/rollup-win32-x64-msvc@npm:4.50.1": - version: 4.50.1 - resolution: "@rollup/rollup-win32-x64-msvc@npm:4.50.1" +"@rollup/rollup-win32-x64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-win32-x64-gnu@npm:4.59.0" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"@rollup/rollup-win32-x64-msvc@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-win32-x64-msvc@npm:4.59.0" conditions: os=win32 & cpu=x64 languageName: node linkType: hard @@ -2106,12 +2134,12 @@ __metadata: languageName: node linkType: hard -"brace-expansion@npm:^2.0.1": - version: 2.0.1 - resolution: "brace-expansion@npm:2.0.1" +"brace-expansion@npm:^2.0.2": + version: 2.0.2 + resolution: "brace-expansion@npm:2.0.2" dependencies: balanced-match: "npm:^1.0.0" - checksum: 10c0/b358f2fe060e2d7a87aa015979ecea07f3c37d4018f8d6deb5bd4c229ad3a0384fe6029bb76cd8be63c81e516ee52d1a0673edbe2023d53a5191732ae3c3e49f + checksum: 10c0/6d117a4c793488af86b83172deb6af143e94c17bc53b0b3cec259733923b4ca84679d506ac261f4ba3c7ed37c46018e2ff442f9ce453af8643ecd64f4a54e6cf languageName: node linkType: hard @@ -4408,20 +4436,20 @@ __metadata: linkType: hard "minimatch@npm:^3.1.2": - version: 3.1.2 - resolution: "minimatch@npm:3.1.2" + version: 3.1.5 + resolution: "minimatch@npm:3.1.5" dependencies: brace-expansion: "npm:^1.1.7" - checksum: 10c0/0262810a8fc2e72cca45d6fd86bd349eee435eb95ac6aa45c9ea2180e7ee875ef44c32b55b5973ceabe95ea12682f6e3725cbb63d7a2d1da3ae1163c8b210311 + checksum: 10c0/2ecbdc0d33f07bddb0315a8b5afbcb761307a8778b48f0b312418ccbced99f104a2d17d8aca7573433c70e8ccd1c56823a441897a45e384ea76ef401a26ace70 languageName: node linkType: hard "minimatch@npm:^9.0.4": - version: 9.0.5 - resolution: "minimatch@npm:9.0.5" + version: 9.0.9 + resolution: "minimatch@npm:9.0.9" dependencies: - brace-expansion: "npm:^2.0.1" - checksum: 
10c0/de96cf5e35bdf0eab3e2c853522f98ffbe9a36c37797778d2665231ec1f20a9447a7e567cb640901f89e4daaa95ae5d70c65a9e8aa2bb0019b6facbc3c0575ed + brace-expansion: "npm:^2.0.2" + checksum: 10c0/0b6a58530dbb00361745aa6c8cffaba4c90f551afe7c734830bd95fd88ebf469dd7355a027824ea1d09e37181cfeb0a797fb17df60c15ac174303ac110eb7e86 languageName: node linkType: hard @@ -5341,31 +5369,35 @@ __metadata: languageName: node linkType: hard -"rollup@npm:^4.43.0": - version: 4.50.1 - resolution: "rollup@npm:4.50.1" - dependencies: - "@rollup/rollup-android-arm-eabi": "npm:4.50.1" - "@rollup/rollup-android-arm64": "npm:4.50.1" - "@rollup/rollup-darwin-arm64": "npm:4.50.1" - "@rollup/rollup-darwin-x64": "npm:4.50.1" - "@rollup/rollup-freebsd-arm64": "npm:4.50.1" - "@rollup/rollup-freebsd-x64": "npm:4.50.1" - "@rollup/rollup-linux-arm-gnueabihf": "npm:4.50.1" - "@rollup/rollup-linux-arm-musleabihf": "npm:4.50.1" - "@rollup/rollup-linux-arm64-gnu": "npm:4.50.1" - "@rollup/rollup-linux-arm64-musl": "npm:4.50.1" - "@rollup/rollup-linux-loongarch64-gnu": "npm:4.50.1" - "@rollup/rollup-linux-ppc64-gnu": "npm:4.50.1" - "@rollup/rollup-linux-riscv64-gnu": "npm:4.50.1" - "@rollup/rollup-linux-riscv64-musl": "npm:4.50.1" - "@rollup/rollup-linux-s390x-gnu": "npm:4.50.1" - "@rollup/rollup-linux-x64-gnu": "npm:4.50.1" - "@rollup/rollup-linux-x64-musl": "npm:4.50.1" - "@rollup/rollup-openharmony-arm64": "npm:4.50.1" - "@rollup/rollup-win32-arm64-msvc": "npm:4.50.1" - "@rollup/rollup-win32-ia32-msvc": "npm:4.50.1" - "@rollup/rollup-win32-x64-msvc": "npm:4.50.1" +"rollup@npm:^4.59.0": + version: 4.59.0 + resolution: "rollup@npm:4.59.0" + dependencies: + "@rollup/rollup-android-arm-eabi": "npm:4.59.0" + "@rollup/rollup-android-arm64": "npm:4.59.0" + "@rollup/rollup-darwin-arm64": "npm:4.59.0" + "@rollup/rollup-darwin-x64": "npm:4.59.0" + "@rollup/rollup-freebsd-arm64": "npm:4.59.0" + "@rollup/rollup-freebsd-x64": "npm:4.59.0" + "@rollup/rollup-linux-arm-gnueabihf": "npm:4.59.0" + 
"@rollup/rollup-linux-arm-musleabihf": "npm:4.59.0" + "@rollup/rollup-linux-arm64-gnu": "npm:4.59.0" + "@rollup/rollup-linux-arm64-musl": "npm:4.59.0" + "@rollup/rollup-linux-loong64-gnu": "npm:4.59.0" + "@rollup/rollup-linux-loong64-musl": "npm:4.59.0" + "@rollup/rollup-linux-ppc64-gnu": "npm:4.59.0" + "@rollup/rollup-linux-ppc64-musl": "npm:4.59.0" + "@rollup/rollup-linux-riscv64-gnu": "npm:4.59.0" + "@rollup/rollup-linux-riscv64-musl": "npm:4.59.0" + "@rollup/rollup-linux-s390x-gnu": "npm:4.59.0" + "@rollup/rollup-linux-x64-gnu": "npm:4.59.0" + "@rollup/rollup-linux-x64-musl": "npm:4.59.0" + "@rollup/rollup-openbsd-x64": "npm:4.59.0" + "@rollup/rollup-openharmony-arm64": "npm:4.59.0" + "@rollup/rollup-win32-arm64-msvc": "npm:4.59.0" + "@rollup/rollup-win32-ia32-msvc": "npm:4.59.0" + "@rollup/rollup-win32-x64-gnu": "npm:4.59.0" + "@rollup/rollup-win32-x64-msvc": "npm:4.59.0" "@types/estree": "npm:1.0.8" fsevents: "npm:~2.3.2" dependenciesMeta: @@ -5389,10 +5421,14 @@ __metadata: optional: true "@rollup/rollup-linux-arm64-musl": optional: true - "@rollup/rollup-linux-loongarch64-gnu": + "@rollup/rollup-linux-loong64-gnu": + optional: true + "@rollup/rollup-linux-loong64-musl": optional: true "@rollup/rollup-linux-ppc64-gnu": optional: true + "@rollup/rollup-linux-ppc64-musl": + optional: true "@rollup/rollup-linux-riscv64-gnu": optional: true "@rollup/rollup-linux-riscv64-musl": @@ -5403,19 +5439,23 @@ __metadata: optional: true "@rollup/rollup-linux-x64-musl": optional: true + "@rollup/rollup-openbsd-x64": + optional: true "@rollup/rollup-openharmony-arm64": optional: true "@rollup/rollup-win32-arm64-msvc": optional: true "@rollup/rollup-win32-ia32-msvc": optional: true + "@rollup/rollup-win32-x64-gnu": + optional: true "@rollup/rollup-win32-x64-msvc": optional: true fsevents: optional: true bin: rollup: dist/bin/rollup - checksum: 10c0/2029282826d5fb4e308be261b2c28329a4d2bd34304cc3960da69fd21d5acccd0267d6770b1656ffc8f166203ef7e865b4583d5f842a519c8ef059ac71854205 
+ checksum: 10c0/f38742da34cfee5e899302615fa157aa77cb6a2a1495e5e3ce4cc9c540d3262e235bbe60caa31562bbfe492b01fdb3e7a8c43c39d842d3293bcf843123b766fc languageName: node linkType: hard diff --git a/yarn-project/ivc-integration/package.json b/yarn-project/ivc-integration/package.json index 94a0dde8621b..440564d7fc67 100644 --- a/yarn-project/ivc-integration/package.json +++ b/yarn-project/ivc-integration/package.json @@ -93,7 +93,7 @@ "jest-mock-extended": "^4.0.0", "msgpackr": "^1.11.2", "resolve-typescript-plugin": "^2.0.1", - "serve": "^14.2.1", + "serve": "^14.2.6", "ts-loader": "^9.5.4", "ts-node": "^10.9.1", "typescript": "^5.3.3", diff --git a/yarn-project/package.json b/yarn-project/package.json index cc721aaa5779..caa1508cd661 100644 --- a/yarn-project/package.json +++ b/yarn-project/package.json @@ -93,6 +93,11 @@ "@aztec/noir-noir_js": "file:../noir/packages/noir_js", "jest-runner@npm:^29.7.0": "patch:jest-runner@npm%3A29.7.0#~/.yarn/patches/jest-runner-npm-29.7.0-3bc9f82b58.patch", "ws": "^8.17.1", - "d3-color": "^3.1.0" + "d3-color": "^3.1.0", + "rollup": "^4.59.0", + "systeminformation": "^5.31.0", + "node-forge": "^1.3.2", + "koa": "^2.16.4", + "serialize-javascript": "^7.0.3" } } diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index eac9a7f96775..49d3009e27f0 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -1466,7 +1466,7 @@ __metadata: playwright: "npm:1.49.0" puppeteer: "npm:^24.22.3" resolve-typescript-plugin: "npm:^2.0.1" - serve: "npm:^14.2.1" + serve: "npm:^14.2.6" ts-loader: "npm:^9.5.4" ts-node: "npm:^10.9.1" tslib: "npm:^2.4.0" @@ -4004,22 +4004,6 @@ __metadata: languageName: node linkType: hard -"@isaacs/balanced-match@npm:^4.0.1": - version: 4.0.1 - resolution: "@isaacs/balanced-match@npm:4.0.1" - checksum: 10/102fbc6d2c0d5edf8f6dbf2b3feb21695a21bc850f11bc47c4f06aa83bd8884fde3fe9d6d797d619901d96865fdcb4569ac2a54c937992c48885c5e3d9967fe8 - languageName: node - linkType: hard - 
-"@isaacs/brace-expansion@npm:^5.0.0": - version: 5.0.0 - resolution: "@isaacs/brace-expansion@npm:5.0.0" - dependencies: - "@isaacs/balanced-match": "npm:^4.0.1" - checksum: 10/cf3b7f206aff12128214a1df764ac8cdbc517c110db85249b945282407e3dfc5c6e66286383a7c9391a059fc8e6e6a8ca82262fc9d2590bd615376141fbebd2d - languageName: node - linkType: hard - "@isaacs/cliui@npm:^8.0.2": version: 8.0.2 resolution: "@isaacs/cliui@npm:8.0.2" @@ -5989,156 +5973,177 @@ __metadata: languageName: node linkType: hard -"@rollup/rollup-android-arm-eabi@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-android-arm-eabi@npm:4.52.3" +"@rollup/rollup-android-arm-eabi@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-android-arm-eabi@npm:4.59.0" conditions: os=android & cpu=arm languageName: node linkType: hard -"@rollup/rollup-android-arm64@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-android-arm64@npm:4.52.3" +"@rollup/rollup-android-arm64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-android-arm64@npm:4.59.0" conditions: os=android & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-darwin-arm64@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-darwin-arm64@npm:4.52.3" +"@rollup/rollup-darwin-arm64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-darwin-arm64@npm:4.59.0" conditions: os=darwin & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-darwin-x64@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-darwin-x64@npm:4.52.3" +"@rollup/rollup-darwin-x64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-darwin-x64@npm:4.59.0" conditions: os=darwin & cpu=x64 languageName: node linkType: hard -"@rollup/rollup-freebsd-arm64@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-freebsd-arm64@npm:4.52.3" +"@rollup/rollup-freebsd-arm64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-freebsd-arm64@npm:4.59.0" conditions: os=freebsd & cpu=arm64 languageName: node 
linkType: hard -"@rollup/rollup-freebsd-x64@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-freebsd-x64@npm:4.52.3" +"@rollup/rollup-freebsd-x64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-freebsd-x64@npm:4.59.0" conditions: os=freebsd & cpu=x64 languageName: node linkType: hard -"@rollup/rollup-linux-arm-gnueabihf@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.52.3" +"@rollup/rollup-linux-arm-gnueabihf@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.59.0" conditions: os=linux & cpu=arm & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-arm-musleabihf@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.52.3" +"@rollup/rollup-linux-arm-musleabihf@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.59.0" conditions: os=linux & cpu=arm & libc=musl languageName: node linkType: hard -"@rollup/rollup-linux-arm64-gnu@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.52.3" +"@rollup/rollup-linux-arm64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.59.0" conditions: os=linux & cpu=arm64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-arm64-musl@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-linux-arm64-musl@npm:4.52.3" +"@rollup/rollup-linux-arm64-musl@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-arm64-musl@npm:4.59.0" conditions: os=linux & cpu=arm64 & libc=musl languageName: node linkType: hard -"@rollup/rollup-linux-loong64-gnu@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-linux-loong64-gnu@npm:4.52.3" +"@rollup/rollup-linux-loong64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-loong64-gnu@npm:4.59.0" conditions: os=linux & cpu=loong64 & libc=glibc languageName: node linkType: hard 
-"@rollup/rollup-linux-ppc64-gnu@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-linux-ppc64-gnu@npm:4.52.3" +"@rollup/rollup-linux-loong64-musl@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-loong64-musl@npm:4.59.0" + conditions: os=linux & cpu=loong64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-linux-ppc64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-ppc64-gnu@npm:4.59.0" conditions: os=linux & cpu=ppc64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-riscv64-gnu@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.52.3" +"@rollup/rollup-linux-ppc64-musl@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-ppc64-musl@npm:4.59.0" + conditions: os=linux & cpu=ppc64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-linux-riscv64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.59.0" conditions: os=linux & cpu=riscv64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-riscv64-musl@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-linux-riscv64-musl@npm:4.52.3" +"@rollup/rollup-linux-riscv64-musl@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-riscv64-musl@npm:4.59.0" conditions: os=linux & cpu=riscv64 & libc=musl languageName: node linkType: hard -"@rollup/rollup-linux-s390x-gnu@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.52.3" +"@rollup/rollup-linux-s390x-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.59.0" conditions: os=linux & cpu=s390x & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-x64-gnu@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-linux-x64-gnu@npm:4.52.3" +"@rollup/rollup-linux-x64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-x64-gnu@npm:4.59.0" conditions: 
os=linux & cpu=x64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-x64-musl@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-linux-x64-musl@npm:4.52.3" +"@rollup/rollup-linux-x64-musl@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-linux-x64-musl@npm:4.59.0" conditions: os=linux & cpu=x64 & libc=musl languageName: node linkType: hard -"@rollup/rollup-openharmony-arm64@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-openharmony-arm64@npm:4.52.3" +"@rollup/rollup-openbsd-x64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-openbsd-x64@npm:4.59.0" + conditions: os=openbsd & cpu=x64 + languageName: node + linkType: hard + +"@rollup/rollup-openharmony-arm64@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-openharmony-arm64@npm:4.59.0" conditions: os=openharmony & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-win32-arm64-msvc@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.52.3" +"@rollup/rollup-win32-arm64-msvc@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.59.0" conditions: os=win32 & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-win32-ia32-msvc@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.52.3" +"@rollup/rollup-win32-ia32-msvc@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.59.0" conditions: os=win32 & cpu=ia32 languageName: node linkType: hard -"@rollup/rollup-win32-x64-gnu@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-win32-x64-gnu@npm:4.52.3" +"@rollup/rollup-win32-x64-gnu@npm:4.59.0": + version: 4.59.0 + resolution: "@rollup/rollup-win32-x64-gnu@npm:4.59.0" conditions: os=win32 & cpu=x64 languageName: node linkType: hard -"@rollup/rollup-win32-x64-msvc@npm:4.52.3": - version: 4.52.3 - resolution: "@rollup/rollup-win32-x64-msvc@npm:4.52.3" +"@rollup/rollup-win32-x64-msvc@npm:4.59.0": + version: 
4.59.0 + resolution: "@rollup/rollup-win32-x64-msvc@npm:4.59.0" conditions: os=win32 & cpu=x64 languageName: node linkType: hard @@ -9163,7 +9168,7 @@ __metadata: languageName: node linkType: hard -"accepts@npm:^1.3.5, accepts@npm:~1.3.4, accepts@npm:~1.3.5, accepts@npm:~1.3.8": +"accepts@npm:^1.3.5, accepts@npm:~1.3.4, accepts@npm:~1.3.8": version: 1.3.8 resolution: "accepts@npm:1.3.8" dependencies: @@ -9293,15 +9298,15 @@ __metadata: languageName: node linkType: hard -"ajv@npm:8.12.0, ajv@npm:~8.12.0": - version: 8.12.0 - resolution: "ajv@npm:8.12.0" +"ajv@npm:8.18.0": + version: 8.18.0 + resolution: "ajv@npm:8.18.0" dependencies: - fast-deep-equal: "npm:^3.1.1" + fast-deep-equal: "npm:^3.1.3" + fast-uri: "npm:^3.0.1" json-schema-traverse: "npm:^1.0.0" require-from-string: "npm:^2.0.2" - uri-js: "npm:^4.2.2" - checksum: 10/b406f3b79b5756ac53bfe2c20852471b08e122bc1ee4cde08ae4d6a800574d9cd78d60c81c69c63ff81e4da7cd0b638fafbb2303ae580d49cf1600b9059efb85 + checksum: 10/bfed9de827a2b27c6d4084324eda76a4e32bdde27410b3e9b81d06e6f8f5c78370fc6b93fe1d869f1939ff1d7c4ae8896960995acb8425e3e9288c8884247c48 languageName: node linkType: hard @@ -9329,6 +9334,18 @@ __metadata: languageName: node linkType: hard +"ajv@npm:~8.12.0": + version: 8.12.0 + resolution: "ajv@npm:8.12.0" + dependencies: + fast-deep-equal: "npm:^3.1.1" + json-schema-traverse: "npm:^1.0.0" + require-from-string: "npm:^2.0.2" + uri-js: "npm:^4.2.2" + checksum: 10/b406f3b79b5756ac53bfe2c20852471b08e122bc1ee4cde08ae4d6a800574d9cd78d60c81c69c63ff81e4da7cd0b638fafbb2303ae580d49cf1600b9059efb85 + languageName: node + linkType: hard + "ansi-align@npm:^3.0.1": version: 3.0.1 resolution: "ansi-align@npm:3.0.1" @@ -9739,6 +9756,13 @@ __metadata: languageName: node linkType: hard +"balanced-match@npm:^4.0.2": + version: 4.0.4 + resolution: "balanced-match@npm:4.0.4" + checksum: 10/fb07bb66a0959c2843fc055838047e2a95ccebb837c519614afb067ebfdf2fa967ca8d712c35ced07f2cd26fc6f07964230b094891315ad74f11eba3d53178a0 + 
languageName: node + linkType: hard + "bare-events@npm:^2.5.4, bare-events@npm:^2.7.0": version: 2.7.0 resolution: "bare-events@npm:2.7.0" @@ -9997,6 +10021,24 @@ __metadata: languageName: node linkType: hard +"brace-expansion@npm:^2.0.2": + version: 2.0.2 + resolution: "brace-expansion@npm:2.0.2" + dependencies: + balanced-match: "npm:^1.0.0" + checksum: 10/01dff195e3646bc4b0d27b63d9bab84d2ebc06121ff5013ad6e5356daa5a9d6b60fa26cf73c74797f2dc3fbec112af13578d51f75228c1112b26c790a87b0488 + languageName: node + linkType: hard + +"brace-expansion@npm:^5.0.2": + version: 5.0.4 + resolution: "brace-expansion@npm:5.0.4" + dependencies: + balanced-match: "npm:^4.0.2" + checksum: 10/cfd57e20d8ded9578149e47ae4d3fff2b2f78d06b54a32a73057bddff65c8e9b930613f0cbcfefedf12dd117151e19d4da16367d5127c54f3bff02d8a4479bb2 + languageName: node + linkType: hard + "braces@npm:^3.0.3, braces@npm:~3.0.2": version: 3.0.3 resolution: "braces@npm:3.0.3" @@ -10920,7 +10962,7 @@ __metadata: languageName: node linkType: hard -"compressible@npm:^2.0.18, compressible@npm:~2.0.16, compressible@npm:~2.0.18": +"compressible@npm:^2.0.18, compressible@npm:~2.0.18": version: 2.0.18 resolution: "compressible@npm:2.0.18" dependencies: @@ -10929,18 +10971,18 @@ __metadata: languageName: node linkType: hard -"compression@npm:1.7.4": - version: 1.7.4 - resolution: "compression@npm:1.7.4" +"compression@npm:1.8.1": + version: 1.8.1 + resolution: "compression@npm:1.8.1" dependencies: - accepts: "npm:~1.3.5" - bytes: "npm:3.0.0" - compressible: "npm:~2.0.16" + bytes: "npm:3.1.2" + compressible: "npm:~2.0.18" debug: "npm:2.6.9" - on-headers: "npm:~1.0.2" - safe-buffer: "npm:5.1.2" + negotiator: "npm:~0.6.4" + on-headers: "npm:~1.1.0" + safe-buffer: "npm:5.2.1" vary: "npm:~1.1.2" - checksum: 10/469cd097908fe1d3ff146596d4c24216ad25eabb565c5456660bdcb3a14c82ebc45c23ce56e19fc642746cf407093b55ab9aa1ac30b06883b27c6c736e6383c2 + checksum: 
10/e7552bfbd780f2003c6fe8decb44561f5cc6bc82f0c61e81122caff5ec656f37824084f52155b1e8ef31d7656cecbec9a2499b7a68e92e20780ffb39b479abb7 languageName: node linkType: hard @@ -16073,9 +16115,9 @@ __metadata: languageName: node linkType: hard -"koa@npm:^2.16.1": - version: 2.16.2 - resolution: "koa@npm:2.16.2" +"koa@npm:^2.16.4": + version: 2.16.4 + resolution: "koa@npm:2.16.4" dependencies: accepts: "npm:^1.3.5" cache-content-type: "npm:^1.0.0" @@ -16100,7 +16142,7 @@ __metadata: statuses: "npm:^1.5.0" type-is: "npm:^1.6.16" vary: "npm:^1.1.2" - checksum: 10/741e389b31a36752c47543f06bcb9b63c19b2649487a266bd83ff363e651d864bbff5d1154a0b310f5db4d11e03060440df0113bad3ef82ac9f051874c789dee + checksum: 10/f49e76c2cb7db4facbf215eef964c1eb3f0012c2f64490dfd9b349727e11c7f429f4bf16a47f725e41325415ffebefab0ca6ece3b1187518b42f979e4dbf6e01 languageName: node linkType: hard @@ -16856,39 +16898,39 @@ __metadata: languageName: node linkType: hard -"minimatch@npm:3.1.2, minimatch@npm:^3.0.4, minimatch@npm:^3.1.1, minimatch@npm:^3.1.2": - version: 3.1.2 - resolution: "minimatch@npm:3.1.2" +"minimatch@npm:3.1.5, minimatch@npm:^3.0.4, minimatch@npm:^3.1.1, minimatch@npm:^3.1.2": + version: 3.1.5 + resolution: "minimatch@npm:3.1.5" dependencies: brace-expansion: "npm:^1.1.7" - checksum: 10/e0b25b04cd4ec6732830344e5739b13f8690f8a012d73445a4a19fbc623f5dd481ef7a5827fde25954cd6026fede7574cc54dc4643c99d6c6b653d6203f94634 + checksum: 10/b11a7ee5773cd34c1a0c8436cdbe910901018fb4b6cb47aa508a18d567f6efd2148507959e35fba798389b161b8604a2d704ccef751ea36bd4582f9852b7d63f languageName: node linkType: hard "minimatch@npm:^10.1.1, minimatch@npm:^9.0.3 || ^10.0.1": - version: 10.1.1 - resolution: "minimatch@npm:10.1.1" + version: 10.2.4 + resolution: "minimatch@npm:10.2.4" dependencies: - "@isaacs/brace-expansion": "npm:^5.0.0" - checksum: 10/110f38921ea527022e90f7a5f43721838ac740d0a0c26881c03b57c261354fb9a0430e40b2c56dfcea2ef3c773768f27210d1106f1f2be19cde3eea93f26f45e + brace-expansion: "npm:^5.0.2" + 
checksum: 10/aea4874e521c55bb60744685bbffe3d152e5460f84efac3ea936e6bbe2ceba7deb93345fec3f9bb17f7b6946776073a64d40ae32bf5f298ad690308121068a1f languageName: node linkType: hard "minimatch@npm:^5.0.1, minimatch@npm:^5.1.6": - version: 5.1.6 - resolution: "minimatch@npm:5.1.6" + version: 5.1.9 + resolution: "minimatch@npm:5.1.9" dependencies: brace-expansion: "npm:^2.0.1" - checksum: 10/126b36485b821daf96d33b5c821dac600cc1ab36c87e7a532594f9b1652b1fa89a1eebcaad4dff17c764dce1a7ac1531327f190fed5f97d8f6e5f889c116c429 + checksum: 10/23b4feb64dcb77ba93b70a72be551eb2e2677ac02178cf1ed3d38836cc4cd84802d90b77f60ef87f2bac64d270d2d8eba242e428f0554ea4e36bfdb7e9d25d0c languageName: node linkType: hard "minimatch@npm:^9.0.1, minimatch@npm:^9.0.3, minimatch@npm:^9.0.4, minimatch@npm:^9.0.5": - version: 9.0.5 - resolution: "minimatch@npm:9.0.5" + version: 9.0.9 + resolution: "minimatch@npm:9.0.9" dependencies: - brace-expansion: "npm:^2.0.1" - checksum: 10/dd6a8927b063aca6d910b119e1f2df6d2ce7d36eab91de83167dd136bb85e1ebff97b0d3de1cb08bd1f7e018ca170b4962479fefab5b2a69e2ae12cb2edc8348 + brace-expansion: "npm:^2.0.2" + checksum: 10/b91fad937deaffb68a45a2cb731ff3cff1c3baf9b6469c879477ed16f15c8f4ce39d63a3f75c2455107c2fdff0f3ab597d97dc09e2e93b883aafcf926ef0c8f9 languageName: node linkType: hard @@ -17385,10 +17427,10 @@ __metadata: languageName: node linkType: hard -"node-forge@npm:^1, node-forge@npm:^1.1.0": - version: 1.3.1 - resolution: "node-forge@npm:1.3.1" - checksum: 10/05bab6868633bf9ad4c3b1dd50ec501c22ffd69f556cdf169a00998ca1d03e8107a6032ba013852f202035372021b845603aeccd7dfcb58cdb7430013b3daa8d +"node-forge@npm:^1.3.2": + version: 1.3.3 + resolution: "node-forge@npm:1.3.3" + checksum: 10/f41c31b9296771a4b8c955d58417471712f54f324603a35f8e6cbac19d5e6eaaf5fd5fd14584dfedecbf46a05438ded6eee60a5f2f0822fc5061aaa073cfc75d languageName: node linkType: hard @@ -17654,6 +17696,13 @@ __metadata: languageName: node linkType: hard +"on-headers@npm:~1.1.0": + version: 1.1.0 + resolution: 
"on-headers@npm:1.1.0" + checksum: 10/98aa64629f986fb8cc4517dd8bede73c980e31208cba97f4442c330959f60ced3dc6214b83420491f5111fc7c4f4343abe2ea62c85f505cf041d67850f238776 + languageName: node + linkType: hard + "on-net-listen@npm:^1.1.0": version: 1.1.2 resolution: "on-net-listen@npm:1.1.2" @@ -19327,32 +19376,35 @@ __metadata: languageName: node linkType: hard -"rollup@npm:^4.43.0": - version: 4.52.3 - resolution: "rollup@npm:4.52.3" - dependencies: - "@rollup/rollup-android-arm-eabi": "npm:4.52.3" - "@rollup/rollup-android-arm64": "npm:4.52.3" - "@rollup/rollup-darwin-arm64": "npm:4.52.3" - "@rollup/rollup-darwin-x64": "npm:4.52.3" - "@rollup/rollup-freebsd-arm64": "npm:4.52.3" - "@rollup/rollup-freebsd-x64": "npm:4.52.3" - "@rollup/rollup-linux-arm-gnueabihf": "npm:4.52.3" - "@rollup/rollup-linux-arm-musleabihf": "npm:4.52.3" - "@rollup/rollup-linux-arm64-gnu": "npm:4.52.3" - "@rollup/rollup-linux-arm64-musl": "npm:4.52.3" - "@rollup/rollup-linux-loong64-gnu": "npm:4.52.3" - "@rollup/rollup-linux-ppc64-gnu": "npm:4.52.3" - "@rollup/rollup-linux-riscv64-gnu": "npm:4.52.3" - "@rollup/rollup-linux-riscv64-musl": "npm:4.52.3" - "@rollup/rollup-linux-s390x-gnu": "npm:4.52.3" - "@rollup/rollup-linux-x64-gnu": "npm:4.52.3" - "@rollup/rollup-linux-x64-musl": "npm:4.52.3" - "@rollup/rollup-openharmony-arm64": "npm:4.52.3" - "@rollup/rollup-win32-arm64-msvc": "npm:4.52.3" - "@rollup/rollup-win32-ia32-msvc": "npm:4.52.3" - "@rollup/rollup-win32-x64-gnu": "npm:4.52.3" - "@rollup/rollup-win32-x64-msvc": "npm:4.52.3" +"rollup@npm:^4.59.0": + version: 4.59.0 + resolution: "rollup@npm:4.59.0" + dependencies: + "@rollup/rollup-android-arm-eabi": "npm:4.59.0" + "@rollup/rollup-android-arm64": "npm:4.59.0" + "@rollup/rollup-darwin-arm64": "npm:4.59.0" + "@rollup/rollup-darwin-x64": "npm:4.59.0" + "@rollup/rollup-freebsd-arm64": "npm:4.59.0" + "@rollup/rollup-freebsd-x64": "npm:4.59.0" + "@rollup/rollup-linux-arm-gnueabihf": "npm:4.59.0" + "@rollup/rollup-linux-arm-musleabihf": 
"npm:4.59.0" + "@rollup/rollup-linux-arm64-gnu": "npm:4.59.0" + "@rollup/rollup-linux-arm64-musl": "npm:4.59.0" + "@rollup/rollup-linux-loong64-gnu": "npm:4.59.0" + "@rollup/rollup-linux-loong64-musl": "npm:4.59.0" + "@rollup/rollup-linux-ppc64-gnu": "npm:4.59.0" + "@rollup/rollup-linux-ppc64-musl": "npm:4.59.0" + "@rollup/rollup-linux-riscv64-gnu": "npm:4.59.0" + "@rollup/rollup-linux-riscv64-musl": "npm:4.59.0" + "@rollup/rollup-linux-s390x-gnu": "npm:4.59.0" + "@rollup/rollup-linux-x64-gnu": "npm:4.59.0" + "@rollup/rollup-linux-x64-musl": "npm:4.59.0" + "@rollup/rollup-openbsd-x64": "npm:4.59.0" + "@rollup/rollup-openharmony-arm64": "npm:4.59.0" + "@rollup/rollup-win32-arm64-msvc": "npm:4.59.0" + "@rollup/rollup-win32-ia32-msvc": "npm:4.59.0" + "@rollup/rollup-win32-x64-gnu": "npm:4.59.0" + "@rollup/rollup-win32-x64-msvc": "npm:4.59.0" "@types/estree": "npm:1.0.8" fsevents: "npm:~2.3.2" dependenciesMeta: @@ -19378,8 +19430,12 @@ __metadata: optional: true "@rollup/rollup-linux-loong64-gnu": optional: true + "@rollup/rollup-linux-loong64-musl": + optional: true "@rollup/rollup-linux-ppc64-gnu": optional: true + "@rollup/rollup-linux-ppc64-musl": + optional: true "@rollup/rollup-linux-riscv64-gnu": optional: true "@rollup/rollup-linux-riscv64-musl": @@ -19390,6 +19446,8 @@ __metadata: optional: true "@rollup/rollup-linux-x64-musl": optional: true + "@rollup/rollup-openbsd-x64": + optional: true "@rollup/rollup-openharmony-arm64": optional: true "@rollup/rollup-win32-arm64-msvc": @@ -19404,7 +19462,7 @@ __metadata: optional: true bin: rollup: dist/bin/rollup - checksum: 10/c4db19a7a04fa93b176ccca67a2ff9806f1edf8e4c2d55a362a6557fd957fe330109043b43ba4b8771fb7722d2cb3ef958b11a1b9c44ee4b6c20ee8f8f5ccdea + checksum: 10/728237932aad7022c0640cd126b9fe5285f2578099f22a0542229a17785320a6553b74582fa5977877541c1faf27de65ed2750bc89dbb55b525405244a46d9f1 languageName: node linkType: hard @@ -19462,13 +19520,6 @@ __metadata: languageName: node linkType: hard 
-"safe-buffer@npm:5.1.2, safe-buffer@npm:~5.1.0, safe-buffer@npm:~5.1.1": - version: 5.1.2 - resolution: "safe-buffer@npm:5.1.2" - checksum: 10/7eb5b48f2ed9a594a4795677d5a150faa7eb54483b2318b568dc0c4fc94092a6cce5be02c7288a0500a156282f5276d5688bce7259299568d1053b2150ef374a - languageName: node - linkType: hard - "safe-buffer@npm:5.2.1, safe-buffer@npm:>=5.1.0, safe-buffer@npm:^5.0.1, safe-buffer@npm:^5.1.0, safe-buffer@npm:^5.1.1, safe-buffer@npm:^5.1.2, safe-buffer@npm:^5.2.0, safe-buffer@npm:^5.2.1, safe-buffer@npm:~5.2.0": version: 5.2.1 resolution: "safe-buffer@npm:5.2.1" @@ -19476,6 +19527,13 @@ __metadata: languageName: node linkType: hard +"safe-buffer@npm:~5.1.0, safe-buffer@npm:~5.1.1": + version: 5.1.2 + resolution: "safe-buffer@npm:5.1.2" + checksum: 10/7eb5b48f2ed9a594a4795677d5a150faa7eb54483b2318b568dc0c4fc94092a6cce5be02c7288a0500a156282f5276d5688bce7259299568d1053b2150ef374a + languageName: node + linkType: hard + "safe-regex-test@npm:^1.1.0": version: 1.1.0 resolution: "safe-regex-test@npm:1.1.0" @@ -19657,27 +19715,25 @@ __metadata: languageName: node linkType: hard -"serialize-javascript@npm:^6.0.2": - version: 6.0.2 - resolution: "serialize-javascript@npm:6.0.2" - dependencies: - randombytes: "npm:^2.1.0" - checksum: 10/445a420a6fa2eaee4b70cbd884d538e259ab278200a2ededd73253ada17d5d48e91fb1f4cd224a236ab62ea7ba0a70c6af29fc93b4f3d3078bf7da1c031fde58 +"serialize-javascript@npm:^7.0.3": + version: 7.0.4 + resolution: "serialize-javascript@npm:7.0.4" + checksum: 10/f96d59d6053739785822750e6ffbe06ec5a2651b836b3e6c76742c613e1b2fcb846b5df92294857ecfe69730fe36a1968c0eb8f258b02fd9173a1d5e73f0a32f languageName: node linkType: hard -"serve-handler@npm:6.1.6": - version: 6.1.6 - resolution: "serve-handler@npm:6.1.6" +"serve-handler@npm:6.1.7": + version: 6.1.7 + resolution: "serve-handler@npm:6.1.7" dependencies: bytes: "npm:3.0.0" content-disposition: "npm:0.5.2" mime-types: "npm:2.1.18" - minimatch: "npm:3.1.2" + minimatch: "npm:3.1.5" path-is-inside: 
"npm:1.0.2" path-to-regexp: "npm:3.3.0" range-parser: "npm:1.2.0" - checksum: 10/7e7d93eb7e69fcd9f9c5afc2ef2b46cb0072b4af13cbabef9bca725afb350ddae6857d8c8be2c256f7ce1f7677c20347801399c11caa5805c0090339f894e8f2 + checksum: 10/2366e53cc8e8376d58abb289293b930111fa5da6d14bb31eafac5b1162f332c45c6f394c7d78fdcf6b5736e12caf9370b02d05c7e8a75291d2fc6a55b52b14ea languageName: node linkType: hard @@ -19720,24 +19776,24 @@ __metadata: languageName: node linkType: hard -"serve@npm:^14.2.1": - version: 14.2.4 - resolution: "serve@npm:14.2.4" +"serve@npm:^14.2.6": + version: 14.2.6 + resolution: "serve@npm:14.2.6" dependencies: "@zeit/schemas": "npm:2.36.0" - ajv: "npm:8.12.0" + ajv: "npm:8.18.0" arg: "npm:5.0.2" boxen: "npm:7.0.0" chalk: "npm:5.0.1" chalk-template: "npm:0.4.0" clipboardy: "npm:3.0.0" - compression: "npm:1.7.4" + compression: "npm:1.8.1" is-port-reachable: "npm:4.0.0" - serve-handler: "npm:6.1.6" + serve-handler: "npm:6.1.7" update-check: "npm:1.5.4" bin: serve: build/main.js - checksum: 10/79627f399226b765f6e2f0f62faeceda5db17d00f40f9ad9faa39049729ea4ce7b595a72cc0dba3543947288772cb60f2b0ab91efa3bbedfe644ca7ee0484df1 + checksum: 10/5ed677e260b21eb6c3f04d5d109f258a06272881d91cbb71a6e2dae6045653d59ae535c94644c017b03db5b472e35523cda287328feaa1d629d4674960ceaa86 languageName: node linkType: hard @@ -20691,12 +20747,12 @@ __metadata: languageName: node linkType: hard -"systeminformation@npm:5.23.8": - version: 5.23.8 - resolution: "systeminformation@npm:5.23.8" +"systeminformation@npm:^5.31.0": + version: 5.31.1 + resolution: "systeminformation@npm:5.31.1" bin: systeminformation: lib/cli.js - checksum: 10/a722b50a2740a4f880901ccbeb854d12d6abe33c3777c5db040eeee4282a1bf52d73d9f41d63d69e897b2cab083c993f1c8f7217c386989456eb920cb2b6b3b7 + checksum: 10/1fff0b2827f7de2ec5379385c9bb12896db92186ee1d721cb08791ae7277a02f39fb8f3060df47312b70fe88c5b91a70726e7265ca8e5bab1f87780fb2acb991 conditions: (os=darwin | os=linux | os=win32 | os=freebsd | os=openbsd | os=netbsd | os=sunos | 
os=android) languageName: node linkType: hard From 5497841a83a17614077167816386e678a23dc5f6 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Thu, 5 Mar 2026 09:53:58 -0500 Subject: [PATCH 32/37] fix: omit bigint priceBumpPercentage from IPC config in testbench worker (#21169) Forward-port of https://github.com/AztecProtocol/aztec-packages/pull/21086 to next (via merge-train/spartan where the priceBumpPercentage feature lives). Fixes `TypeError: Do not know how to serialize a BigInt` in `p2p_client.proposal_tx_collector.bench.test.ts`. The `priceBumpPercentage` config field is a `bigint` which can't be serialized over IPC (JSON). This omits it from the IPC config and restores the default on the worker side. ClaudeBox log: http://ci.aztec-labs.com/0233e65ff02e664d-2 --- .../src/testbench/p2p_client_testbench_worker.ts | 1 + .../p2p/src/testbench/worker_client_manager.ts | 15 +++++++++++---- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/yarn-project/p2p/src/testbench/p2p_client_testbench_worker.ts b/yarn-project/p2p/src/testbench/p2p_client_testbench_worker.ts index e8cb8ac7ca46..32c46967fc65 100644 --- a/yarn-project/p2p/src/testbench/p2p_client_testbench_worker.ts +++ b/yarn-project/p2p/src/testbench/p2p_client_testbench_worker.ts @@ -340,6 +340,7 @@ process.on('message', async msg => { const config: P2PConfig = { ...rawConfig, peerIdPrivateKey: rawConfig.peerIdPrivateKey ? 
new SecretValue(rawConfig.peerIdPrivateKey) : undefined, + priceBumpPercentage: 10n, } as P2PConfig; workerConfig = config; diff --git a/yarn-project/p2p/src/testbench/worker_client_manager.ts b/yarn-project/p2p/src/testbench/worker_client_manager.ts index d0e3d3d1334f..ac85411b4135 100644 --- a/yarn-project/p2p/src/testbench/worker_client_manager.ts +++ b/yarn-project/p2p/src/testbench/worker_client_manager.ts @@ -81,13 +81,15 @@ class WorkerClientManager { * Note: We send the raw peerIdPrivateKey string instead of SecretValue * because SecretValue.toJSON() returns '[Redacted]', losing the value. * The worker must re-wrap it in SecretValue. + * We also omit priceBumpPercentage since it's a bigint and can't be + * serialized over IPC (which uses JSON under the hood). */ private createClientConfig( clientIndex: number, port: number, otherNodes: string[], - ): Omit & { peerIdPrivateKey: string } & Partial { - return { + ): Omit & { peerIdPrivateKey: string } & Partial { + const { priceBumpPercentage: _, ...config } = { ...getP2PDefaultConfig(), p2pEnabled: true, peerIdPrivateKey: this.peerIdPrivateKeys[clientIndex], @@ -96,7 +98,10 @@ class WorkerClientManager { p2pPort: port, bootstrapNodes: [...otherNodes], ...this.p2pConfig, - } as Omit & { peerIdPrivateKey: string } & Partial; + }; + return config as Omit & { + peerIdPrivateKey: string; + } & Partial; } /** @@ -104,7 +109,9 @@ class WorkerClientManager { * Config uses raw string for peerIdPrivateKey (not SecretValue) for IPC serialization. 
*/ private spawnWorkerProcess( - config: Omit & { peerIdPrivateKey: string } & Partial, + config: Omit & { + peerIdPrivateKey: string; + } & Partial, clientIndex: number, ): [ChildProcess, Promise] { const useCompiled = existsSync(workerJsPath); From 6a205f77d84959942503f4f7317563a27f311af8 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Thu, 5 Mar 2026 16:08:08 -0300 Subject: [PATCH 33/37] fix(p2p): report most severe failure in runValidations (#21185) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When multiple validators fail, `runValidations` reported whichever failure appeared first in the Record by insertion order. Now it reports the one with the harshest penalty severity. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-authored-by: Claude Opus 4.6 --- yarn-project/p2p/src/services/libp2p/libp2p_service.ts | 8 ++++++-- yarn-project/stdlib/src/p2p/peer_error.ts | 7 +++++++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts index 2a81e7a1350d..d467d60ae734 100644 --- a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts +++ b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts @@ -1,5 +1,6 @@ import type { EpochCacheInterface } from '@aztec/epoch-cache'; import { BlockNumber, type SlotNumber } from '@aztec/foundation/branded-types'; +import { maxBy } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { type Logger, createLibp2pComponentLogger, createLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; @@ -19,6 +20,7 @@ import { P2PMessage, type ValidationResult as P2PValidationResult, PeerErrorSeverity, + PeerErrorSeverityByHarshness, TopicType, createTopicString, getTopicsForConfig, @@ -1662,8 +1664,10 @@ export class LibP2PService extends WithTracer implements 
P2PService { // A promise that resolves when all validations have been run const allValidations = await Promise.all(validationPromises); - const failed = allValidations.find(x => !x.isValid); - if (failed) { + const failures = allValidations.filter(x => !x.isValid); + if (failures.length > 0) { + // Pick the most severe failure (lowest tolerance = harshest penalty) + const failed = maxBy(failures, f => PeerErrorSeverityByHarshness.indexOf(f.severity))!; return { allPassed: false, failure: { diff --git a/yarn-project/stdlib/src/p2p/peer_error.ts b/yarn-project/stdlib/src/p2p/peer_error.ts index 73be41ebada2..f3edb326fb55 100644 --- a/yarn-project/stdlib/src/p2p/peer_error.ts +++ b/yarn-project/stdlib/src/p2p/peer_error.ts @@ -15,3 +15,10 @@ export enum PeerErrorSeverity { */ HighToleranceError = 'HighToleranceError', } + +/** Severities ordered from mildest to harshest. */ +export const PeerErrorSeverityByHarshness = [ + PeerErrorSeverity.HighToleranceError, + PeerErrorSeverity.MidToleranceError, + PeerErrorSeverity.LowToleranceError, +] as const; From 8d9331640441d8d9386ca38228f2f6aeed266b72 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Fri, 6 Mar 2026 04:33:53 -0500 Subject: [PATCH 34/37] fix: use dedicated L1 account for bot bridge resume tests to avoid nonce race (#21148) ## Summary - The "bridge resume" tests in `e2e_bot.test.ts` were using `l1Mnemonic` with the hardhat default mnemonic (account index 0), which is the **same L1 account** the sequencer uses for block proposals - This caused a flaky nonce-too-low error when the sequencer sent an L1 transaction between the bot's mint and approve calls - Fix: use `l1PrivateKey` from account index 7 (a dedicated, unused index) instead ## Test plan - The flaky test "does not reuse prior bridge claims if recipient address changes" should no longer fail with nonce-too-low errors - Other bot tests using indices 8 and 9 are unaffected ClaudeBox log: 
http://ci.aztec-labs.com/7cffb2ae9eb6b324-1 --- yarn-project/end-to-end/src/e2e_bot.test.ts | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_bot.test.ts b/yarn-project/end-to-end/src/e2e_bot.test.ts index 9fb5d09ff8f7..ba55a1a4941d 100644 --- a/yarn-project/end-to-end/src/e2e_bot.test.ts +++ b/yarn-project/end-to-end/src/e2e_bot.test.ts @@ -134,7 +134,10 @@ describe('e2e_bot', () => { l1RpcUrls, feePaymentMethod: 'fee_juice', - l1PrivateKey: getPrivateKey(), + // Use a dedicated L1 account (index 7) for bridging. The default mnemonic account (index 0) + // is shared with the sequencer which sends L1 block proposals, causing nonce races on the + // approve/deposit calls in bridgeL1FeeJuice. Indices 8 and 9 are used by other tests below. + l1PrivateKey: new SecretValue(bufferToHex(getPrivateKeyFromIndex(7)!)), flushSetupTransactions: true, // Increase fee headroom to handle fee volatility from rapid block building in tests. // Fees can escalate >10x due to blocks built by earlier tests and bridge operations. @@ -174,7 +177,8 @@ describe('e2e_bot', () => { l1RpcUrls, feePaymentMethod: 'fee_juice', - l1PrivateKey: getPrivateKey(), + // See comment above — dedicated L1 account to avoid nonce races with the sequencer. + l1PrivateKey: new SecretValue(bufferToHex(getPrivateKeyFromIndex(7)!)), flushSetupTransactions: true, // Increase fee headroom to handle fee volatility from rapid block building in tests. 
// This test is especially susceptible because changing salt triggers a new bridge claim, From 6c1a3557a633f13f2bf3e4e92f787d60535e85d8 Mon Sep 17 00:00:00 2001 From: spypsy Date: Fri, 6 Mar 2026 10:12:20 +0000 Subject: [PATCH 35/37] fix: parse error.message in formatViemError (#21163) Fixes [A-581](https://linear.app/aztec-labs/issue/A-581/viem-error-formatting) --- yarn-project/ethereum/src/utils.ts | 31 ++++++++++++++++-------------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/yarn-project/ethereum/src/utils.ts b/yarn-project/ethereum/src/utils.ts index 35804849358c..81673660565f 100644 --- a/yarn-project/ethereum/src/utils.ts +++ b/yarn-project/ethereum/src/utils.ts @@ -170,6 +170,21 @@ function getNestedErrorData(error: unknown): string | undefined { return undefined; } +/** + * Truncates an error message to a safe length for log renderers. + * LogExplorer can only render up to 2500 characters in its summary view. + * We cap at 2000 to leave room for decorating context added by callers. + */ +function truncateErrorMessage(message: string): string { + const MAX = 2000; + const CHUNK = 950; + if (message.length <= MAX) { + return message; + } + const truncated = message.length - 2 * CHUNK; + return message.slice(0, CHUNK) + `...${truncated} characters truncated...` + message.slice(-CHUNK); +} + /** * Formats a Viem error into a FormattedViemError instance. * @param error - The error to format. @@ -232,22 +247,10 @@ export function formatViemError(error: any, abi: Abi = ErrorsAbi): FormattedViem // If it's a regular Error instance, return it with its message if (error instanceof Error) { - return new FormattedViemError(error.message, (error as any)?.metaMessages); - } - - const body = String(error); - const length = body.length; - // LogExplorer can only render up to 2500 characters in it's summary view. 
Try to keep the whole message below this number - // Limit the error to 2000 chacaters in order to allow code higher up to decorate this error with extra details (up to 500 characters) - if (length > 2000) { - const chunk = 950; - const truncated = length - 2 * chunk; - return new FormattedViemError( - body.slice(0, chunk) + `...${truncated} characters truncated...` + body.slice(-1 * chunk), - ); + return new FormattedViemError(truncateErrorMessage(error.message), (error as any)?.metaMessages); } - return new FormattedViemError(body); + return new FormattedViemError(truncateErrorMessage(String(error))); } function stripAbis(obj: any) { From 145f08909771b3773a9666417520f7196818cd3c Mon Sep 17 00:00:00 2001 From: danielntmd <162406516+danielntmd@users.noreply.github.com> Date: Fri, 6 Mar 2026 05:12:30 -0500 Subject: [PATCH 36/37] fix: bump lighthouse consensus client v7.1.0 -> v8.0.1 (#21170) - reduce aztec committee lag -> 1 for scenario network Co-authored-by: danielntmd --- spartan/environments/next-scenario.env | 4 ++-- spartan/eth-devnet/create_genesis.sh | 2 +- spartan/eth-devnet/entrypoints/eth-beacon.sh | 1 + spartan/eth-devnet/values.yaml | 2 +- spartan/terraform/deploy-eth-devnet/values/eth-devnet.yaml | 2 +- 5 files changed, 6 insertions(+), 5 deletions(-) diff --git a/spartan/environments/next-scenario.env b/spartan/environments/next-scenario.env index b0668e5c960d..fcfb9f34d893 100644 --- a/spartan/environments/next-scenario.env +++ b/spartan/environments/next-scenario.env @@ -13,8 +13,8 @@ DESTROY_AZTEC_INFRA=true VERIFY_CONTRACTS=false USE_LOAD_BALANCERS=true -AZTEC_LAG_IN_EPOCHS_FOR_VALIDATOR_SET=2 -AZTEC_LAG_IN_EPOCHS_FOR_RANDAO=2 +AZTEC_LAG_IN_EPOCHS_FOR_VALIDATOR_SET=1 +AZTEC_LAG_IN_EPOCHS_FOR_RANDAO=1 OTEL_COLLECTOR_ENDPOINT=REPLACE_WITH_GCP_SECRET diff --git a/spartan/eth-devnet/create_genesis.sh b/spartan/eth-devnet/create_genesis.sh index 11944384d1be..73948a9031e4 100755 --- a/spartan/eth-devnet/create_genesis.sh +++ 
b/spartan/eth-devnet/create_genesis.sh @@ -135,7 +135,7 @@ function create_beacon_genesis { docker run --rm \ -v "$tmp_dir:/tmp" \ -v "$beacon_genesis_path:/out" \ - maddiaa/eth-beacon-genesis devnet \ + ethpandaops/eth-beacon-genesis:master-f57c0fb devnet \ --config="/tmp/config.yaml" \ --eth1-config="/tmp/genesis.json" \ --mnemonics="/tmp/mnemonics.yaml" \ diff --git a/spartan/eth-devnet/entrypoints/eth-beacon.sh b/spartan/eth-devnet/entrypoints/eth-beacon.sh index ae3678bad4ba..3b50cda3b238 100755 --- a/spartan/eth-devnet/entrypoints/eth-beacon.sh +++ b/spartan/eth-devnet/entrypoints/eth-beacon.sh @@ -21,6 +21,7 @@ lighthouse bn \ --disable-enr-auto-update \ --staking \ --http \ + --supernode \ --http-address=0.0.0.0 \ --http-port=${BEACON_HTTP_PORT} \ --validator-monitor-auto \ diff --git a/spartan/eth-devnet/values.yaml b/spartan/eth-devnet/values.yaml index 1a9c213fb80b..31de1d98b1ef 100644 --- a/spartan/eth-devnet/values.yaml +++ b/spartan/eth-devnet/values.yaml @@ -12,7 +12,7 @@ images: image: nethermind/nethermind:1.32.2 pullPolicy: IfNotPresent lighthouse: - image: sigp/lighthouse:v7.1.0 + image: sigp/lighthouse:v8.0.1 pullPolicy: IfNotPresent ethereum: diff --git a/spartan/terraform/deploy-eth-devnet/values/eth-devnet.yaml b/spartan/terraform/deploy-eth-devnet/values/eth-devnet.yaml index a57c105553f9..dfd70704e2c1 100644 --- a/spartan/terraform/deploy-eth-devnet/values/eth-devnet.yaml +++ b/spartan/terraform/deploy-eth-devnet/values/eth-devnet.yaml @@ -12,7 +12,7 @@ images: image: nethermind/nethermind:1.32.2 pullPolicy: IfNotPresent lighthouse: - image: sigp/lighthouse:v7.1.0 + image: sigp/lighthouse:v8.0.1 pullPolicy: IfNotPresent ethereum: From dad460eb45ffec75208e311d1e66524ab27965d7 Mon Sep 17 00:00:00 2001 From: Michal Rzeszutko Date: Fri, 6 Mar 2026 13:27:39 +0100 Subject: [PATCH 37/37] chore: code deduplication + refactor (public setup allowlist) (#21200) ## Summary Follow-up to #21154, addressing review feedback to deduplicate code and use
contract artifacts instead of hardcoded signature strings. - **New `buildAllowedElement` helper** (`@aztec/p2p/msg_validators`): Builds an `AllowedElement` from a `ContractArtifact` + function name, deriving both the selector (via `FunctionSelector.fromNameAndParameters`) and calldata length from the artifact. Eliminates all hardcoded `FunctionSelector.fromSignature(...)` calls. - **Refactored protocol allowlist** (`allowed_public_setup.ts`): Uses `buildAllowedElement` with `AuthRegistryArtifact` and `FeeJuiceArtifact` instead of manually constructing selectors and calldata lengths. - **Deduplicated token allowlist** into a single shared `getTokenAllowedSetupFunctions()` in `@aztec/aztec/testing`, removing three identical copies from `local-network.ts`, `fees_test.ts`, and `client_flows_benchmark.ts`. - **Refactored `fee_payer_balance.ts`**: Replaced hardcoded `fromSignature('_increase_public_balance((Field),u128)')` with artifact-derived selector using `FeeJuiceArtifact`. - **Left `public_fee_payment_method.ts` and `private_fee_payment_method.ts` as-is**: These deprecated classes in `aztec.js` would require adding contract artifact dependencies or API changes to refactor. Net result: **-154 lines** removed across 3 duplicated functions and hardcoded selectors, **+73 lines** added for the shared helper and single source of truth. 
--- .../aztec/src/local-network/local-network.ts | 39 +-------------- yarn-project/aztec/src/testing/index.ts | 1 + .../aztec/src/testing/token_allowed_setup.ts | 19 ++++++++ .../client_flows/client_flows_benchmark.ts | 37 +------------- .../end-to-end/src/e2e_fees/fees_test.ts | 48 +------------------ .../tx_validator/allowed_public_setup.ts | 43 ++++------------- .../tx_validator/allowed_setup_helpers.ts | 31 ++++++++++++ .../tx_validator/fee_payer_balance.ts | 8 +++- .../src/msg_validators/tx_validator/index.ts | 1 + 9 files changed, 73 insertions(+), 154 deletions(-) create mode 100644 yarn-project/aztec/src/testing/token_allowed_setup.ts create mode 100644 yarn-project/p2p/src/msg_validators/tx_validator/allowed_setup_helpers.ts diff --git a/yarn-project/aztec/src/local-network/local-network.ts b/yarn-project/aztec/src/local-network/local-network.ts index 4f62ea214738..5856a93bf8e7 100644 --- a/yarn-project/aztec/src/local-network/local-network.ts +++ b/yarn-project/aztec/src/local-network/local-network.ts @@ -16,15 +16,11 @@ import { SecretValue } from '@aztec/foundation/config'; import { EthAddress } from '@aztec/foundation/eth-address'; import type { LogFn } from '@aztec/foundation/log'; import { DateProvider, TestDateProvider } from '@aztec/foundation/timer'; -import { TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; import { protocolContractsHash } from '@aztec/protocol-contracts'; import { SequencerState } from '@aztec/sequencer-client'; -import { FunctionSelector, countArgumentsSize } from '@aztec/stdlib/abi'; -import type { FunctionAbi } from '@aztec/stdlib/abi'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { getContractClassFromArtifact } from '@aztec/stdlib/contract'; -import type { AllowedElement, ProvingJobBroker } from '@aztec/stdlib/interfaces/server'; +import type { ProvingJobBroker } from '@aztec/stdlib/interfaces/server'; import 
type { PublicDataTreeLeaf } from '@aztec/stdlib/trees'; import { type TelemetryClient, @@ -43,43 +39,12 @@ import { createAccountLogs } from '../cli/util.js'; import { DefaultMnemonic } from '../mnemonic.js'; import { AnvilTestWatcher } from '../testing/anvil_test_watcher.js'; import { EpochTestSettler } from '../testing/epoch_test_settler.js'; +import { getTokenAllowedSetupFunctions } from '../testing/token_allowed_setup.js'; import { getBananaFPCAddress, setupBananaFPC } from './banana_fpc.js'; import { getSponsoredFPCAddress } from './sponsored_fpc.js'; const logger = createLogger('local-network'); -/** - * Returns Token-specific allowlist entries for FPC-based fee payments. - * The local network deploys a banana FPC and Token contracts, so the node must allow Token setup functions. - */ -async function getTokenAllowedSetupFunctions(): Promise { - const tokenClassId = (await getContractClassFromArtifact(TokenContractArtifact)).id; - const allFunctions: FunctionAbi[] = (TokenContractArtifact.functions as FunctionAbi[]).concat( - TokenContractArtifact.nonDispatchPublicFunctions || [], - ); - const getCalldataLength = (name: string) => { - const fn = allFunctions.find(f => f.name === name)!; - return 1 + countArgumentsSize(fn); - }; - const increaseBalanceSelector = await FunctionSelector.fromSignature('_increase_public_balance((Field),u128)'); - const transferInPublicSelector = await FunctionSelector.fromSignature( - 'transfer_in_public((Field),(Field),u128,Field)', - ); - return [ - { - classId: tokenClassId, - selector: increaseBalanceSelector, - calldataLength: getCalldataLength('_increase_public_balance'), - onlySelf: true, - }, - { - classId: tokenClassId, - selector: transferInPublicSelector, - calldataLength: getCalldataLength('transfer_in_public'), - }, - ]; -} - const localAnvil = foundry; /** diff --git a/yarn-project/aztec/src/testing/index.ts b/yarn-project/aztec/src/testing/index.ts index eaf2f836c9a3..97d6da751638 100644 --- 
a/yarn-project/aztec/src/testing/index.ts +++ b/yarn-project/aztec/src/testing/index.ts @@ -2,3 +2,4 @@ export { AnvilTestWatcher } from './anvil_test_watcher.js'; export { EthCheatCodes, RollupCheatCodes } from '@aztec/ethereum/test'; export { CheatCodes } from './cheat_codes.js'; export { EpochTestSettler } from './epoch_test_settler.js'; +export { getTokenAllowedSetupFunctions } from './token_allowed_setup.js'; diff --git a/yarn-project/aztec/src/testing/token_allowed_setup.ts b/yarn-project/aztec/src/testing/token_allowed_setup.ts new file mode 100644 index 000000000000..539b9521bddf --- /dev/null +++ b/yarn-project/aztec/src/testing/token_allowed_setup.ts @@ -0,0 +1,19 @@ +import { TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; +import { buildAllowedElement } from '@aztec/p2p/msg_validators'; +import { getContractClassFromArtifact } from '@aztec/stdlib/contract'; +import type { AllowedElement } from '@aztec/stdlib/interfaces/server'; + +/** + * Returns Token-specific allowlist entries needed for FPC-based fee payments. + * These are test-only: FPC-based fee payment with custom tokens won't work on mainnet alpha. 
+ */ +export async function getTokenAllowedSetupFunctions(): Promise { + const tokenClassId = (await getContractClassFromArtifact(TokenContractArtifact)).id; + const target = { classId: tokenClassId }; + return Promise.all([ + // Token: needed for private transfers via FPC (transfer_to_public enqueues this) + buildAllowedElement(TokenContractArtifact, target, '_increase_public_balance', { onlySelf: true }), + // Token: needed for public transfers via FPC (fee_entrypoint_public enqueues this) + buildAllowedElement(TokenContractArtifact, target, 'transfer_in_public'), + ]); +} diff --git a/yarn-project/end-to-end/src/bench/client_flows/client_flows_benchmark.ts b/yarn-project/end-to-end/src/bench/client_flows/client_flows_benchmark.ts index 26e6e78f3dc1..c461800a7912 100644 --- a/yarn-project/end-to-end/src/bench/client_flows/client_flows_benchmark.ts +++ b/yarn-project/end-to-end/src/bench/client_flows/client_flows_benchmark.ts @@ -4,7 +4,7 @@ import { type FeePaymentMethod, PrivateFeePaymentMethod, SponsoredFeePaymentMeth import { type Logger, createLogger } from '@aztec/aztec.js/log'; import type { AztecNode } from '@aztec/aztec.js/node'; import type { Wallet } from '@aztec/aztec.js/wallet'; -import { CheatCodes } from '@aztec/aztec/testing'; +import { CheatCodes, getTokenAllowedSetupFunctions } from '@aztec/aztec/testing'; import { createExtendedL1Client } from '@aztec/ethereum/client'; import { RollupContract } from '@aztec/ethereum/contracts'; import type { DeployAztecL1ContractsArgs } from '@aztec/ethereum/deploy-aztec-l1-contracts'; @@ -19,16 +19,12 @@ import { AMMContract } from '@aztec/noir-contracts.js/AMM'; import { FPCContract } from '@aztec/noir-contracts.js/FPC'; import { FeeJuiceContract } from '@aztec/noir-contracts.js/FeeJuice'; import { SponsoredFPCContract } from '@aztec/noir-contracts.js/SponsoredFPC'; -import { TokenContract as BananaCoin, TokenContract, TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; +import { TokenContract as 
BananaCoin, TokenContract } from '@aztec/noir-contracts.js/Token'; import { ProtocolContractAddress } from '@aztec/protocol-contracts'; import { getCanonicalFeeJuice } from '@aztec/protocol-contracts/fee-juice'; import { type PXEConfig, getPXEConfig } from '@aztec/pxe/server'; -import { FunctionSelector, countArgumentsSize } from '@aztec/stdlib/abi'; -import type { FunctionAbi } from '@aztec/stdlib/abi'; -import { getContractClassFromArtifact } from '@aztec/stdlib/contract'; import type { ContractInstanceWithAddress } from '@aztec/stdlib/contract'; import { GasSettings } from '@aztec/stdlib/gas'; -import type { AllowedElement } from '@aztec/stdlib/interfaces/server'; import { deriveSigningKey } from '@aztec/stdlib/keys'; import { MNEMONIC } from '../../fixtures/fixtures.js'; @@ -46,35 +42,6 @@ import { type ClientFlowsConfig, FULL_FLOWS_CONFIG, KEY_FLOWS_CONFIG } from './c const { BENCHMARK_CONFIG } = process.env; -/** Returns Token-specific allowlist entries for FPC-based fee payments (test-only). 
*/ -async function getTokenAllowedSetupFunctions(): Promise { - const tokenClassId = (await getContractClassFromArtifact(TokenContractArtifact)).id; - const allFunctions: FunctionAbi[] = (TokenContractArtifact.functions as FunctionAbi[]).concat( - TokenContractArtifact.nonDispatchPublicFunctions || [], - ); - const getCalldataLength = (name: string) => { - const fn = allFunctions.find(f => f.name === name)!; - return 1 + countArgumentsSize(fn); - }; - const increaseBalanceSelector = await FunctionSelector.fromSignature('_increase_public_balance((Field),u128)'); - const transferInPublicSelector = await FunctionSelector.fromSignature( - 'transfer_in_public((Field),(Field),u128,Field)', - ); - return [ - { - classId: tokenClassId, - selector: increaseBalanceSelector, - calldataLength: getCalldataLength('_increase_public_balance'), - onlySelf: true, - }, - { - classId: tokenClassId, - selector: transferInPublicSelector, - calldataLength: getCalldataLength('transfer_in_public'), - }, - ]; -} - export type AccountType = 'ecdsar1' | 'schnorr'; export type FeePaymentMethodGetter = (wallet: Wallet, sender: AztecAddress) => Promise; export type BenchmarkingFeePaymentMethod = 'bridged_fee_juice' | 'private_fpc' | 'sponsored_fpc' | 'fee_juice'; diff --git a/yarn-project/end-to-end/src/e2e_fees/fees_test.ts b/yarn-project/end-to-end/src/e2e_fees/fees_test.ts index f163d566681d..f83d7a125f39 100644 --- a/yarn-project/end-to-end/src/e2e_fees/fees_test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/fees_test.ts @@ -1,7 +1,7 @@ import type { AztecAddress } from '@aztec/aztec.js/addresses'; import { type Logger, createLogger } from '@aztec/aztec.js/log'; import type { AztecNode } from '@aztec/aztec.js/node'; -import { CheatCodes } from '@aztec/aztec/testing'; +import { CheatCodes, getTokenAllowedSetupFunctions } from '@aztec/aztec/testing'; import { createExtendedL1Client } from '@aztec/ethereum/client'; import { RollupContract } from '@aztec/ethereum/contracts'; import type { 
DeployAztecL1ContractsArgs } from '@aztec/ethereum/deploy-aztec-l1-contracts'; @@ -14,16 +14,12 @@ import { AppSubscriptionContract } from '@aztec/noir-contracts.js/AppSubscriptio import { FPCContract } from '@aztec/noir-contracts.js/FPC'; import { FeeJuiceContract } from '@aztec/noir-contracts.js/FeeJuice'; import { SponsoredFPCContract } from '@aztec/noir-contracts.js/SponsoredFPC'; -import { TokenContract as BananaCoin, TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; +import { TokenContract as BananaCoin } from '@aztec/noir-contracts.js/Token'; import { CounterContract } from '@aztec/noir-test-contracts.js/Counter'; import { ProtocolContractAddress } from '@aztec/protocol-contracts'; import { getCanonicalFeeJuice } from '@aztec/protocol-contracts/fee-juice'; -import { FunctionSelector, countArgumentsSize } from '@aztec/stdlib/abi'; -import type { FunctionAbi } from '@aztec/stdlib/abi'; -import { getContractClassFromArtifact } from '@aztec/stdlib/contract'; import { GasSettings } from '@aztec/stdlib/gas'; import type { AztecNodeAdmin } from '@aztec/stdlib/interfaces/client'; -import type { AllowedElement } from '@aztec/stdlib/interfaces/server'; import { getContract } from 'viem'; @@ -41,46 +37,6 @@ import { type BalancesFn, getBalancesFn, setupSponsoredFPC } from '../fixtures/u import { FeeJuicePortalTestingHarnessFactory, type GasBridgingTestHarness } from '../shared/gas_portal_test_harness.js'; import { TestWallet } from '../test-wallet/test_wallet.js'; -/** Returns the calldata length for a function: 1 (selector) + arguments size. 
*/ -function getCalldataLength(functionName: string): number { - const allFunctions: FunctionAbi[] = (TokenContractArtifact.functions as FunctionAbi[]).concat( - TokenContractArtifact.nonDispatchPublicFunctions || [], - ); - const fn = allFunctions.find(f => f.name === functionName); - if (!fn) { - throw new Error(`Unknown function ${functionName} in Token artifact`); - } - return 1 + countArgumentsSize(fn); -} - -/** - * Returns Token-specific allowlist entries needed for FPC-based fee payments. - * These are test-only — FPC-based fee payment with custom tokens won't work on mainnet alpha. - */ -async function getTokenAllowedSetupFunctions(): Promise { - const tokenClassId = (await getContractClassFromArtifact(TokenContractArtifact)).id; - const increaseBalanceSelector = await FunctionSelector.fromSignature('_increase_public_balance((Field),u128)'); - const transferInPublicSelector = await FunctionSelector.fromSignature( - 'transfer_in_public((Field),(Field),u128,Field)', - ); - - return [ - // Token: needed for private transfers via FPC (transfer_to_public enqueues this) - { - classId: tokenClassId, - selector: increaseBalanceSelector, - calldataLength: getCalldataLength('_increase_public_balance'), - onlySelf: true, - }, - // Token: needed for public transfers via FPC (fee_entrypoint_public enqueues this) - { - classId: tokenClassId, - selector: transferInPublicSelector, - calldataLength: getCalldataLength('transfer_in_public'), - }, - ]; -} - /** * Test fixture for testing fees. Provides the following setup steps: * InitialAccounts: Initializes 3 Schnorr account contracts. 
diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/allowed_public_setup.ts b/yarn-project/p2p/src/msg_validators/tx_validator/allowed_public_setup.ts index b8709732a192..41231ae207b2 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/allowed_public_setup.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/allowed_public_setup.ts @@ -1,55 +1,30 @@ import { ProtocolContractAddress } from '@aztec/protocol-contracts'; import { AuthRegistryArtifact } from '@aztec/protocol-contracts/auth-registry'; import { FeeJuiceArtifact } from '@aztec/protocol-contracts/fee-juice'; -import { FunctionSelector, countArgumentsSize } from '@aztec/stdlib/abi'; -import type { ContractArtifact, FunctionAbi } from '@aztec/stdlib/abi'; import type { AllowedElement } from '@aztec/stdlib/interfaces/server'; -/** Returns the expected calldata length for a function: 1 (selector) + arguments size. */ -function getCalldataLength(artifact: ContractArtifact, functionName: string): number { - const allFunctions: FunctionAbi[] = (artifact.functions as FunctionAbi[]).concat( - artifact.nonDispatchPublicFunctions || [], - ); - const fn = allFunctions.find(f => f.name === functionName); - if (!fn) { - throw new Error(`Unknown function ${functionName} in artifact ${artifact.name}`); - } - return 1 + countArgumentsSize(fn); -} +import { buildAllowedElement } from './allowed_setup_helpers.js'; let defaultAllowedSetupFunctions: AllowedElement[] | undefined; /** Returns the default list of functions allowed to run in the setup phase of a transaction. 
*/ export async function getDefaultAllowedSetupFunctions(): Promise { if (defaultAllowedSetupFunctions === undefined) { - const setAuthorizedInternalSelector = await FunctionSelector.fromSignature('_set_authorized((Field),Field,bool)'); - const setAuthorizedSelector = await FunctionSelector.fromSignature('set_authorized(Field,bool)'); - const increaseBalanceSelector = await FunctionSelector.fromSignature('_increase_public_balance((Field),u128)'); - - defaultAllowedSetupFunctions = [ + defaultAllowedSetupFunctions = await Promise.all([ // AuthRegistry: needed for authwit support via private path (set_authorized_private enqueues _set_authorized) - { - address: ProtocolContractAddress.AuthRegistry, - selector: setAuthorizedInternalSelector, - calldataLength: getCalldataLength(AuthRegistryArtifact, '_set_authorized'), + buildAllowedElement(AuthRegistryArtifact, { address: ProtocolContractAddress.AuthRegistry }, '_set_authorized', { onlySelf: true, rejectNullMsgSender: true, - }, + }), // AuthRegistry: needed for authwit support via public path (PublicFeePaymentMethod calls set_authorized directly) - { - address: ProtocolContractAddress.AuthRegistry, - selector: setAuthorizedSelector, - calldataLength: getCalldataLength(AuthRegistryArtifact, 'set_authorized'), + buildAllowedElement(AuthRegistryArtifact, { address: ProtocolContractAddress.AuthRegistry }, 'set_authorized', { rejectNullMsgSender: true, - }, + }), // FeeJuice: needed for claiming on the same tx as a spend (claim_and_end_setup enqueues this) - { - address: ProtocolContractAddress.FeeJuice, - selector: increaseBalanceSelector, - calldataLength: getCalldataLength(FeeJuiceArtifact, '_increase_public_balance'), + buildAllowedElement(FeeJuiceArtifact, { address: ProtocolContractAddress.FeeJuice }, '_increase_public_balance', { onlySelf: true, - }, - ]; + }), + ]); } return defaultAllowedSetupFunctions; } diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/allowed_setup_helpers.ts 
b/yarn-project/p2p/src/msg_validators/tx_validator/allowed_setup_helpers.ts new file mode 100644 index 000000000000..3e63283c4696 --- /dev/null +++ b/yarn-project/p2p/src/msg_validators/tx_validator/allowed_setup_helpers.ts @@ -0,0 +1,31 @@ +import type { Fr } from '@aztec/foundation/curves/bn254'; +import { FunctionSelector, countArgumentsSize, getAllFunctionAbis } from '@aztec/stdlib/abi'; +import type { ContractArtifact } from '@aztec/stdlib/abi'; +import type { AztecAddress } from '@aztec/stdlib/aztec-address'; +import type { AllowedElement } from '@aztec/stdlib/interfaces/server'; + +/** + * Builds an AllowedElement from a contract artifact, deriving both the function selector + * and calldata length from the artifact instead of hardcoding signature strings. + */ +export async function buildAllowedElement( + artifact: ContractArtifact, + target: { address: AztecAddress } | { classId: Fr }, + functionName: string, + opts?: { onlySelf?: boolean; rejectNullMsgSender?: boolean }, +): Promise { + const allFunctions = getAllFunctionAbis(artifact); + const fn = allFunctions.find(f => f.name === functionName); + if (!fn) { + throw new Error(`Unknown function ${functionName} in artifact ${artifact.name}`); + } + const selector = await FunctionSelector.fromNameAndParameters(fn.name, fn.parameters); + const calldataLength = 1 + countArgumentsSize(fn); + return { + ...target, + selector, + calldataLength, + ...(opts?.onlySelf ? { onlySelf: true } : {}), + ...(opts?.rejectNullMsgSender ? 
{ rejectNullMsgSender: true } : {}), + } as AllowedElement; +} diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/fee_payer_balance.ts b/yarn-project/p2p/src/msg_validators/tx_validator/fee_payer_balance.ts index 4ce80e9cae29..af3936effa58 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/fee_payer_balance.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/fee_payer_balance.ts @@ -1,5 +1,6 @@ +import { FeeJuiceArtifact } from '@aztec/protocol-contracts/fee-juice'; import { getCallRequestsWithCalldataByPhase } from '@aztec/simulator/server'; -import { FunctionSelector } from '@aztec/stdlib/abi'; +import { FunctionSelector, getAllFunctionAbis } from '@aztec/stdlib/abi'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import { type Tx, TxExecutionPhase } from '@aztec/stdlib/tx'; @@ -8,7 +9,10 @@ export type FeePayerBalanceDelta = { claimAmount: bigint; }; -const increasePublicBalanceSelectorPromise = FunctionSelector.fromSignature('_increase_public_balance((Field),u128)'); +const increasePublicBalanceSelectorPromise = (() => { + const fn = getAllFunctionAbis(FeeJuiceArtifact).find(f => f.name === '_increase_public_balance')!; + return FunctionSelector.fromNameAndParameters(fn.name, fn.parameters); +})(); export function getTxFeeLimit(tx: Tx): bigint { return tx.data.constants.txContext.gasSettings.getFeeLimit().toBigInt(); diff --git a/yarn-project/p2p/src/msg_validators/tx_validator/index.ts b/yarn-project/p2p/src/msg_validators/tx_validator/index.ts index dcdd4fb050e9..893796772a3b 100644 --- a/yarn-project/p2p/src/msg_validators/tx_validator/index.ts +++ b/yarn-project/p2p/src/msg_validators/tx_validator/index.ts @@ -8,6 +8,7 @@ export * from './gas_validator.js'; export * from './phases_validator.js'; export * from './test_utils.js'; export * from './allowed_public_setup.js'; +export * from './allowed_setup_helpers.js'; export * from './archive_cache.js'; export * from './tx_permitted_validator.js'; export * from 
'./timestamp_validator.js';