diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_mbps_redistribution.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_mbps_redistribution.test.ts new file mode 100644 index 000000000000..6a8d665eaa7c --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_mbps_redistribution.test.ts @@ -0,0 +1,372 @@ +import type { Archiver } from '@aztec/archiver'; +import type { AztecNodeConfig, AztecNodeService } from '@aztec/aztec-node'; +import { AztecAddress, EthAddress } from '@aztec/aztec.js/addresses'; +import { NO_WAIT } from '@aztec/aztec.js/contracts'; +import { Fr } from '@aztec/aztec.js/fields'; +import type { Logger } from '@aztec/aztec.js/log'; +import { waitForTx } from '@aztec/aztec.js/node'; +import { RollupContract } from '@aztec/ethereum/contracts'; +import type { Operator } from '@aztec/ethereum/deploy-aztec-l1-contracts'; +import { waitUntilL1Timestamp } from '@aztec/ethereum/l1-tx-utils'; +import { asyncMap } from '@aztec/foundation/async-map'; +import { CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; +import { times, timesAsync } from '@aztec/foundation/collection'; +import { SecretValue } from '@aztec/foundation/config'; +import { retryUntil } from '@aztec/foundation/retry'; +import { sleep } from '@aztec/foundation/sleep'; +import { executeTimeout } from '@aztec/foundation/timer'; +import { TestContract } from '@aztec/noir-test-contracts.js/Test'; +import { getTimestampForSlot } from '@aztec/stdlib/epoch-helpers'; + +import { jest } from '@jest/globals'; +import { privateKeyToAccount } from 'viem/accounts'; + +import { type EndToEndContext, getPrivateKeyFromIndex } from '../fixtures/utils.js'; +import { TestWallet } from '../test-wallet/test_wallet.js'; +import { proveInteraction } from '../test-wallet/utils.js'; +import { EpochsTestContext } from './epochs_test.js'; + +jest.setTimeout(1000 * 60 * 20); + +const NODE_COUNT = 4; + +/** + * Number of txs to feed one-by-one during early sub-slots. 
+ * These are sent at the start of each sub-slot so each block picks up exactly one. + */ +const EARLY_TX_COUNT = 2; + +/** + * Number of txs to dump into the mempool right before the last sub-slot. + * With redistribution working, the last block should have enough budget to include all of them. + * Without redistribution, the per-block gas cap starves the last block. + */ +const LATE_TX_COUNT = 4; + +/** Total txs pre-proved before the test begins. */ +const TOTAL_TX_COUNT = EARLY_TX_COUNT + LATE_TX_COUNT; + +/** + * Verifies that checkpoint budget redistribution allows late transactions to fit in the last block + * when earlier blocks in the checkpoint were light. + * + * The test configures a tight per-checkpoint tx limit across multiple blocks per checkpoint. Early + * blocks each receive a single tx, leaving most of the budget unconsumed. All remaining txs are then + * submitted just before the last sub-slot. With redistribution working, the last block inherits the + * unused budget from earlier blocks and can include all late txs. Without redistribution, each block + * is capped at the static per-block limit and the late txs are left out. + * + * Success is verified by confirming that all late txs land in the same block. + */ +describe('e2e_epochs/epochs_mbps_redistribution', () => { + let context: EndToEndContext; + let logger: Logger; + let rollup: RollupContract; + let archiver: Archiver; + + let test: EpochsTestContext; + let validators: (Operator & { privateKey: `0x${string}` })[]; + let nodes: AztecNodeService[]; + let contract: TestContract; + let wallet: TestWallet; + let from: AztecAddress; + + /** + * Sets up validators and the test context with MBPS + redistribution config. + * Uses a tight `maxTxsPerCheckpoint` so that the redistribution logic is exercised. 
+ */ + async function setupTest( + nodeConfigOverride?: (index: number) => Partial, + contextConfigOverride?: Record, + ) { + validators = times(NODE_COUNT, i => { + const privateKey = `0x${getPrivateKeyFromIndex(i + 3)!.toString('hex')}` as `0x${string}`; + const attester = EthAddress.fromString(privateKeyToAccount(privateKey).address); + return { attester, withdrawer: attester, privateKey, bn254SecretKey: new SecretValue(Fr.random().toBigInt()) }; + }); + + // Timing calculation for 3 blocks per checkpoint with 8s sub-slots: + // - initializationOffset = 0.5s (test mode, ethereumSlotDuration < 8) + // - 3 blocks x 8s = 24s + // - checkpointFinalization = 0.5s (assemble) + 0 (p2p in test) + 2s (L1 publish) = 2.5s + // - finalBlockDuration = 8s (re-execution) + // - Total: 0.5 + 24 + 8 + 2.5 = 35s => use 36s + test = await EpochsTestContext.setup({ + numberOfAccounts: 1, + initialValidators: validators, + mockGossipSubNetwork: true, + disableAnvilTestWatcher: true, + startProverNode: true, + aztecEpochDuration: 4, + enforceTimeTable: true, + ethereumSlotDuration: 4, + aztecSlotDuration: 36, + blockDurationMs: 8000, + l1PublishingTime: 2, + attestationPropagationTime: 0.5, + aztecTargetCommitteeSize: 3, + // Allow empty blocks so that early sub-slots without txs still produce blocks. + minTxsPerBlock: 0, + // Tight checkpoint-level tx limit: forces redistribution to matter. + // With 3 blocks and multiplier 1.2: maxTxsPerBlock = ceil(TOTAL_TX_COUNT/3*1.2). + // The redistribution should cap early blocks, preserving budget for the last block. + maxTxsPerCheckpoint: TOTAL_TX_COUNT, + // PXE syncs on checkpointed chain tip. + pxeOpts: { syncChainTip: 'checkpointed' }, + ...contextConfigOverride, + }); + + ({ context, logger, rollup } = test); + wallet = context.wallet; + from = context.accounts[0]; + + // Halt the default sequencer (it's not a validator). 
+ logger.warn(`Stopping sequencer in initial aztec node.`); + await context.sequencer!.stop(); + + // Start validator nodes (sequencers not started yet). + logger.warn(`Starting ${NODE_COUNT} validator nodes.`); + nodes = await asyncMap(validators, ({ privateKey }, i) => + test.createValidatorNode([privateKey], { dontStartSequencer: true, ...nodeConfigOverride?.(i) }), + ); + logger.warn(`Started ${NODE_COUNT} validator nodes.`, { validators: validators.map(v => v.attester.toString()) }); + + // Point the wallet at a validator node. + wallet.updateNode(nodes[0]); + archiver = nodes[0].getBlockSource() as Archiver; + + // Register the test contract. + contract = await test.registerTestContract(wallet); + logger.warn(`Test setup completed.`); + } + + afterEach(async () => { + jest.restoreAllMocks(); + await test?.teardown(); + }); + + it('redistributes checkpoint budget so late txs fit in the last block', async () => { + await setupTest(); + + // Pre-prove all transactions up front. + logger.warn(`Pre-proving ${TOTAL_TX_COUNT} transactions`); + const provenTxs = await timesAsync(TOTAL_TX_COUNT, i => + proveInteraction(wallet, contract.methods.emit_nullifier(new Fr(i + 1)), { from }), + ); + logger.warn(`Pre-proved ${provenTxs.length} transactions`); + + // Warp to just before the next L2 slot so sequencers start building promptly. + const currentSlot = await rollup.getSlotNumber(); + const nextSlot = SlotNumber(currentSlot + 1); + const slotStartTimestamp = getTimestampForSlot(nextSlot, test.constants); + // Warp to one L1 slot before the L2 slot starts (= the sequencer's build start). + const warpTo = slotStartTimestamp - BigInt(test.L1_BLOCK_TIME_IN_S); + logger.warn(`Warping to L1 timestamp ${warpTo} (one L1 slot before L2 slot ${nextSlot})`); + await waitUntilL1Timestamp(test.l1Client, warpTo, undefined, 60); + + // Start sequencers. 
+ await Promise.all(nodes.map(n => n.getSequencer()!.start())); + logger.warn(`Started all sequencers`); + + // Feed one tx per sub-slot for the early blocks, waiting for each to be proposed before sending the next. + const earlyTxHashes = []; + for (let i = 0; i < EARLY_TX_COUNT; i++) { + logger.warn(`Sending early transaction ${i + 1}/${EARLY_TX_COUNT}`); + const txHash = await provenTxs[i].send({ wait: NO_WAIT }); + earlyTxHashes.push(txHash); + // Wait until the tx is proposed (mined) before sending the next one. + await retryUntil( + async () => (await Promise.all(nodes.map(n => n.getTxReceipt(txHash)))).some(receipt => receipt.isMined()), + 'tx proposed', + 30, + 0.5, + ); + logger.warn(`Early transaction ${i + 1}/${EARLY_TX_COUNT} confirmed proposed`); + } + logger.warn(`Sent ${earlyTxHashes.length} early transactions`); + + // Right before the last sub-slot, dump all remaining txs. + // With redistribution working, the last block's budget should be generous + // enough (early blocks consumed little), and all late txs should fit. + logger.warn(`Sending ${LATE_TX_COUNT} late transactions before the last sub-slot`); + const lateTxHashes = await Promise.all(provenTxs.slice(EARLY_TX_COUNT).map(tx => tx.send({ wait: NO_WAIT }))); + logger.warn(`Sent ${lateTxHashes.length} late transactions`); + + // Wait for ALL txs to be mined. + const allTxHashes = [...earlyTxHashes, ...lateTxHashes]; + const timeout = test.L2_SLOT_DURATION_IN_S * 5; + logger.warn(`Waiting for all ${allTxHashes.length} transactions to be mined (timeout=${timeout}s)`); + await executeTimeout( + () => Promise.all(allTxHashes.map(txHash => waitForTx(nodes[0], txHash, { timeout }))), + timeout * 1000, + ); + logger.warn(`All transactions have been mined`); + + // Verify that all late txs landed in the same block. + // This confirms the last block received the redistributed budget and could fit them all. 
+ const lateReceipts = await Promise.all(lateTxHashes.map(h => nodes[0].getTxReceipt(h))); + const lateBlockNumbers = lateReceipts.map(r => r.blockNumber); + logger.warn(`Late tx block numbers: ${lateBlockNumbers.join(', ')}`); + expect(new Set(lateBlockNumbers).size).toBe(1); + }); + + /** + * Verifies that validators do NOT apply the proposer's fair-share multiplier when re-executing blocks. + * + * Two of the four validator nodes are configured with a very large `perBlockAllocationMultiplier` (10), + * allowing their proposer to pack multiple txs into a single block. The other two keep the default + * multiplier (1.2), which limits them to 1 tx per block given the tight `maxTxsPerCheckpoint`. + * + * With `maxTxsPerCheckpoint = 2` and 3 blocks per checkpoint: + * - Normal multiplier (1.2): per-block limit = ceil(2/3 * 1.2) = ceil(0.8) = 1 tx + * - High multiplier (10): per-block limit = ceil(2/3 * 10) = ceil(6.67) = 7 txs (capped by remaining = 2) + * + * The test watches checkpoints and identifies the proposer for each slot via EpochCache. + * It waits until it observes both: + * - A checkpoint by a high-multiplier proposer with at least one block having >1 tx + * - A checkpoint by a normal-multiplier proposer with all blocks having at most 1 tx + * + * If validators incorrectly applied their own multiplier during re-execution, checkpoints built by + * high-multiplier proposers would fail attestation and the chain would stall. + */ + it('validators accept blocks built with a larger proposer multiplier (no fair-share re-execution)', async () => { + const HIGH_MULTIPLIER = 10; + const MAX_TXS_PER_CHECKPOINT = 2; + + // Nodes 0 and 1 get a very large multiplier; nodes 2 and 3 keep the default (1.2). + await setupTest(i => (i < 2 ? 
{ perBlockAllocationMultiplier: HIGH_MULTIPLIER } : {}), { + maxTxsPerCheckpoint: MAX_TXS_PER_CHECKPOINT, + }); + logger.warn( + `Set perBlockAllocationMultiplier=${HIGH_MULTIPLIER} on nodes 0,1; maxTxsPerCheckpoint=${MAX_TXS_PER_CHECKPOINT}`, + ); + + // Pre-prove an initial batch of transactions. + const INITIAL_TX_COUNT = 4; + let nullifierCounter = 200; + logger.warn(`Pre-proving ${INITIAL_TX_COUNT} initial transactions`); + const initialProvenTxs = await timesAsync(INITIAL_TX_COUNT, () => + proveInteraction(wallet, contract.methods.emit_nullifier(new Fr(nullifierCounter++)), { from }), + ); + logger.warn(`Pre-proved ${initialProvenTxs.length} transactions`); + + // Warp to just before the next L2 slot so sequencers start building promptly. + const currentSlot = await rollup.getSlotNumber(); + const nextSlot = SlotNumber(currentSlot + 1); + const slotStartTimestamp = getTimestampForSlot(nextSlot, test.constants); + const warpTo = slotStartTimestamp - BigInt(test.L1_BLOCK_TIME_IN_S); + logger.warn(`Warping to L1 timestamp ${warpTo} (one L1 slot before L2 slot ${nextSlot})`); + await waitUntilL1Timestamp(test.l1Client, warpTo, undefined, 60); + + // Start sequencers and send the initial batch. + await Promise.all(nodes.map(n => n.getSequencer()!.start())); + logger.warn(`Started all sequencers`); + + logger.warn(`Sending ${initialProvenTxs.length} initial transactions`); + await Promise.all(initialProvenTxs.map(tx => tx.send({ wait: NO_WAIT }))); + logger.warn(`Sent initial transactions`); + + // Background loop: keep the mempool topped up so proposers always have txs to include. 
+ let done = false; + const keepMempoolFull = async () => { + while (!done) { + try { + const pendingCount = await nodes[0].getPendingTxCount(); + if (pendingCount < 3) { + const tx = await proveInteraction(wallet, contract.methods.emit_nullifier(new Fr(nullifierCounter++)), { + from, + }); + await tx.send({ wait: NO_WAIT }); + logger.verbose(`Topped up mempool (was ${pendingCount}, nullifier=${nullifierCounter - 1})`); + } + } catch (err) { + logger.verbose(`Mempool top-up error (will retry): ${err}`); + } + await sleep(1000); + } + }; + void keepMempoolFull(); + + // Build a lookup from attester address to validator index for proposer identification. + const attesterToIndex = new Map(); + for (let i = 0; i < validators.length; i++) { + attesterToIndex.set(validators[i].attester.toString().toLowerCase(), i); + } + + // Watch checkpoints and identify the proposer via EpochCache (L1 committee selection). + let seenHighMultiplier = false; + let seenNormalMultiplier = false; + let lastSeenCheckpoint = CheckpointNumber(0); + + const timeoutSeconds = test.L2_SLOT_DURATION_IN_S * 10; + logger.warn(`Watching checkpoints for up to ${timeoutSeconds}s until both proposer types are observed`); + + try { + await retryUntil( + async () => { + const checkpoints = await archiver.getCheckpoints(CheckpointNumber(1), 50); + for (const pc of checkpoints) { + if (pc.checkpoint.number <= lastSeenCheckpoint) { + continue; + } + lastSeenCheckpoint = pc.checkpoint.number; + + const blockTxCounts = pc.checkpoint.blocks.map(b => b.body.txEffects.length); + const totalTxs = blockTxCounts.reduce((a, b) => a + b, 0); + + // Skip empty checkpoints (no txs to analyze). + if (totalTxs === 0) { + logger.warn(`Checkpoint ${pc.checkpoint.number}: empty, skipping`); + continue; + } + + // Identify the proposer for this checkpoint's slot via EpochCache. 
+ const slot = pc.checkpoint.header.slotNumber; + const proposer = await test.epochCache.getProposerAttesterAddressInSlot(slot); + if (!proposer) { + logger.warn(`Checkpoint ${pc.checkpoint.number}: could not determine proposer for slot ${slot}`); + continue; + } + const proposerIndex = attesterToIndex.get(proposer.toString().toLowerCase()); + const isHighMultiplier = proposerIndex !== undefined && proposerIndex < 2; + + logger.warn( + `Checkpoint ${pc.checkpoint.number} slot ${slot}: proposer=${proposer} (index=${proposerIndex}, ` + + `${isHighMultiplier ? 'HIGH' : 'NORMAL'} multiplier), blockTxCounts=[${blockTxCounts.join(',')}]`, + ); + + if (isHighMultiplier) { + // High-multiplier proposer: at least one block should have >1 tx. + const hasMultiTxBlock = blockTxCounts.some(count => count > 1); + if (hasMultiTxBlock) { + seenHighMultiplier = true; + logger.warn(`Observed high-multiplier checkpoint with multi-tx block`); + } + } else if (proposerIndex !== undefined) { + // Normal-multiplier proposer: each block should have at most 1 tx. + for (const count of blockTxCounts) { + expect(count).toBeLessThanOrEqual(1); + } + seenNormalMultiplier = true; + logger.warn(`Observed normal-multiplier checkpoint with per-block tx counts <= 1`); + } + } + + return seenHighMultiplier && seenNormalMultiplier ? true : undefined; + }, + 'both proposer types observed', + timeoutSeconds, + 1, + ); + } finally { + done = true; + } + + logger.warn( + `Test passed: observed checkpoints from both high-multiplier and normal-multiplier proposers. 
` + + `High-multiplier proposers packed >1 tx per block; normal proposers used at most 1 tx per block.`, + ); + expect(seenHighMultiplier).toBe(true); + expect(seenNormalMultiplier).toBe(true); + }); +}); diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.test.ts b/yarn-project/sequencer-client/src/client/sequencer-client.test.ts deleted file mode 100644 index f2eb896e4746..000000000000 --- a/yarn-project/sequencer-client/src/client/sequencer-client.test.ts +++ /dev/null @@ -1,116 +0,0 @@ -import { MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT } from '@aztec/constants'; -import { createLogger } from '@aztec/foundation/log'; - -import type { SequencerClientConfig } from '../config.js'; -import { computeBlockLimits } from './sequencer-client.js'; - -describe('computeBlockLimits', () => { - const log = createLogger('test'); - - /** Builds a minimal config with only the fields needed by computeBlockLimits. */ - function makeConfig(overrides: Partial = {}): SequencerClientConfig { - return { - ethereumSlotDuration: 12, - aztecSlotDuration: 72, - attestationPropagationTime: 3, - enforceTimeTable: true, - // No blockDurationMs -> single block mode -> maxNumberOfBlocks = 1 - ...overrides, - } as SequencerClientConfig; - } - - describe('L2 gas', () => { - it('derives maxL2BlockGas from rollupManaLimit when not explicitly set', () => { - const rollupManaLimit = 1_000_000; - // Single block mode (maxNumberOfBlocks=1), default multiplier=1.2: - // min(1_000_000, ceil(1_000_000 / 1 * 1.2)) = min(1_000_000, 1_200_000) = 1_000_000 - const result = computeBlockLimits(makeConfig(), rollupManaLimit, 12, log); - expect(result.maxL2BlockGas).toBe(rollupManaLimit); - }); - - it('uses explicit maxL2BlockGas when within rollupManaLimit', () => { - const result = computeBlockLimits(makeConfig({ maxL2BlockGas: 500_000 }), 1_000_000, 12, log); - expect(result.maxL2BlockGas).toBe(500_000); - }); - - it('caps explicit maxL2BlockGas at rollupManaLimit', () => { - const result = 
computeBlockLimits(makeConfig({ maxL2BlockGas: 2_000_000 }), 1_000_000, 12, log); - expect(result.maxL2BlockGas).toBe(1_000_000); - }); - }); - - describe('DA gas', () => { - const daLimit = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; - - it('derives maxDABlockGas from DA checkpoint limit when not explicitly set', () => { - // Single block mode (maxNumberOfBlocks=1), default multiplier=1.2: - // min(daLimit, ceil(daLimit / 1 * 1.2)) = min(daLimit, daLimit * 1.2) = daLimit - const result = computeBlockLimits(makeConfig(), 1_000_000, 12, log); - expect(result.maxDABlockGas).toBe(daLimit); - }); - - it('uses explicit maxDABlockGas when within DA checkpoint limit', () => { - const explicit = Math.floor(daLimit / 2); - const result = computeBlockLimits(makeConfig({ maxDABlockGas: explicit }), 1_000_000, 12, log); - expect(result.maxDABlockGas).toBe(explicit); - }); - - it('caps explicit maxDABlockGas at DA checkpoint limit', () => { - const result = computeBlockLimits(makeConfig({ maxDABlockGas: daLimit + 100_000 }), 1_000_000, 12, log); - expect(result.maxDABlockGas).toBe(daLimit); - }); - }); - - describe('TX count', () => { - it('uses explicit maxTxsPerBlock when set', () => { - const result = computeBlockLimits(makeConfig({ maxTxsPerBlock: 10 }), 1_000_000, 12, log); - expect(result.maxTxsPerBlock).toBe(10); - }); - - it('caps maxTxsPerBlock at maxTxsPerCheckpoint', () => { - const result = computeBlockLimits( - makeConfig({ maxTxsPerBlock: 50, maxTxsPerCheckpoint: 30 }), - 1_000_000, - 12, - log, - ); - expect(result.maxTxsPerBlock).toBe(30); - }); - - it('derives maxTxsPerBlock from maxTxsPerCheckpoint when per-block not set', () => { - // Multi-block mode with maxNumberOfBlocks=5, multiplier=1.2: - // min(100, ceil(100 / 5 * 1.2)) = min(100, 24) = 24 - const config = makeConfig({ - maxTxsPerCheckpoint: 100, - blockDurationMs: 8000, - }); - const result = computeBlockLimits(config, 1_000_000, 12, log); - expect(result.maxTxsPerBlock).toBe(24); - }); - }); - - 
describe('multi-block mode', () => { - it('distributes budget across blocks in multi-block mode', () => { - // With blockDurationMs=8000, aztecSlotDuration=72, ethereumSlotDuration=12, - // attestationPropagationTime=3, l1PublishingTime=12: - // checkpointFinalizationTime = 1 + 3*2 + 12 = 19 - // timeReservedAtEnd = 8 + 19 = 27 - // timeAvailableForBlocks = 72 - 1 - 27 = 44 - // maxNumberOfBlocks = floor(44 / 8) = 5 - // With multiplier=1.2 and rollupManaLimit=1_000_000: - // maxL2BlockGas = min(1_000_000, ceil(1_000_000 / 5 * 1.2)) = min(1_000_000, 240_000) = 240_000 - const config = makeConfig({ blockDurationMs: 8000 }); - const result = computeBlockLimits(config, 1_000_000, 12, log); - expect(result.maxL2BlockGas).toBe(240_000); - - const daLimit = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; - expect(result.maxDABlockGas).toBe(Math.min(daLimit, Math.ceil((daLimit / 5) * 1.2))); - }); - - it('returns maxBlocksPerCheckpoint from timetable', () => { - const config = makeConfig({ blockDurationMs: 8000 }); - const result = computeBlockLimits(config, 1_000_000, 12, log); - expect(result.maxBlocksPerCheckpoint).toBe(5); - }); - }); -}); diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index 0efeafb01f10..6c00c41db92c 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -19,15 +19,10 @@ import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; import { L1Metrics, type TelemetryClient } from '@aztec/telemetry-client'; import { FullNodeCheckpointsBuilder, NodeKeystoreAdapter, type ValidatorClient } from '@aztec/validator-client'; -import { - DefaultSequencerConfig, - type SequencerClientConfig, - getPublisherConfigFromSequencerConfig, -} from '../config.js'; +import { type SequencerClientConfig, getPublisherConfigFromSequencerConfig } from '../config.js'; import { GlobalVariableBuilder } 
from '../global_variable_builder/index.js'; import { SequencerPublisherFactory } from '../publisher/sequencer-publisher-factory.js'; import { Sequencer, type SequencerConfig } from '../sequencer/index.js'; -import { SequencerTimetable } from '../sequencer/timetable.js'; /** * Encapsulates the full sequencer and publisher. @@ -160,12 +155,7 @@ export class SequencerClient { const l1PublishingTimeBasedOnChain = isAnvilTestChain(config.l1ChainId) ? 1 : ethereumSlotDuration; const l1PublishingTime = config.l1PublishingTime ?? l1PublishingTimeBasedOnChain; - const { maxL2BlockGas, maxDABlockGas, maxTxsPerBlock, maxBlocksPerCheckpoint } = computeBlockLimits( - config, - rollupManaLimit, - l1PublishingTime, - log, - ); + const { maxL2BlockGas, maxDABlockGas, maxTxsPerBlock } = capPerBlockLimits(config, rollupManaLimit, log); const l1Constants = { l1GenesisTime, slotDuration: Number(slotDuration), ethereumSlotDuration, rollupManaLimit }; @@ -183,7 +173,7 @@ export class SequencerClient { deps.dateProvider, epochCache, rollupContract, - { ...config, l1PublishingTime, maxL2BlockGas, maxDABlockGas, maxTxsPerBlock, maxBlocksPerCheckpoint }, + { ...config, l1PublishingTime, maxL2BlockGas, maxDABlockGas, maxTxsPerBlock }, telemetryClient, log, ); @@ -248,88 +238,39 @@ export class SequencerClient { } /** - * Computes per-block L2 gas, DA gas, and TX count budgets based on the L1 rollup limits and the timetable. - * If the user explicitly set a limit, it is capped at the corresponding checkpoint limit. - * Otherwise, derives it as (checkpointLimit / maxBlocks) * multiplier, capped at the checkpoint limit. + * Caps operator-provided per-block limits at checkpoint-level limits. + * Returns undefined for any limit the operator didn't set — the checkpoint builder handles redistribution. 
*/ -export function computeBlockLimits( +function capPerBlockLimits( config: SequencerClientConfig, rollupManaLimit: number, - l1PublishingTime: number, log: ReturnType, -): { maxL2BlockGas: number; maxDABlockGas: number; maxTxsPerBlock: number; maxBlocksPerCheckpoint: number } { - const maxNumberOfBlocks = new SequencerTimetable({ - ethereumSlotDuration: config.ethereumSlotDuration, - aztecSlotDuration: config.aztecSlotDuration, - l1PublishingTime, - p2pPropagationTime: config.attestationPropagationTime, - blockDurationMs: config.blockDurationMs, - enforce: config.enforceTimeTable ?? DefaultSequencerConfig.enforceTimeTable, - }).maxNumberOfBlocks; - - const multiplier = config.perBlockAllocationMultiplier ?? DefaultSequencerConfig.perBlockAllocationMultiplier; - - // Compute maxL2BlockGas - let maxL2BlockGas: number; - if (config.maxL2BlockGas !== undefined) { - if (config.maxL2BlockGas > rollupManaLimit) { - log.warn( - `Provided MAX_L2_BLOCK_GAS ${config.maxL2BlockGas} exceeds L1 rollup mana limit ${rollupManaLimit} (capping)`, - ); - maxL2BlockGas = rollupManaLimit; - } else { - maxL2BlockGas = config.maxL2BlockGas; - } - } else { - maxL2BlockGas = Math.min(rollupManaLimit, Math.ceil((rollupManaLimit / maxNumberOfBlocks) * multiplier)); +): { maxL2BlockGas: number | undefined; maxDABlockGas: number | undefined; maxTxsPerBlock: number | undefined } { + let maxL2BlockGas = config.maxL2BlockGas; + if (maxL2BlockGas !== undefined && maxL2BlockGas > rollupManaLimit) { + log.warn(`Provided MAX_L2_BLOCK_GAS ${maxL2BlockGas} exceeds rollup mana limit ${rollupManaLimit} (capping)`); + maxL2BlockGas = rollupManaLimit; } - // Compute maxDABlockGas - const daCheckpointLimit = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; - let maxDABlockGas: number; - if (config.maxDABlockGas !== undefined) { - if (config.maxDABlockGas > daCheckpointLimit) { - log.warn( - `Provided MAX_DA_BLOCK_GAS ${config.maxDABlockGas} exceeds DA checkpoint limit ${daCheckpointLimit} (capping)`, - ); - 
maxDABlockGas = daCheckpointLimit; - } else { - maxDABlockGas = config.maxDABlockGas; - } - } else { - maxDABlockGas = Math.min(daCheckpointLimit, Math.ceil((daCheckpointLimit / maxNumberOfBlocks) * multiplier)); + let maxDABlockGas = config.maxDABlockGas; + if (maxDABlockGas !== undefined && maxDABlockGas > MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT) { + log.warn( + `Provided MAX_DA_BLOCK_GAS ${maxDABlockGas} exceeds DA checkpoint limit ${MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT} (capping)`, + ); + maxDABlockGas = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; } - // Compute maxTxsPerBlock - const defaultMaxTxsPerBlock = 32; - let maxTxsPerBlock: number; - if (config.maxTxsPerBlock !== undefined) { - if (config.maxTxsPerCheckpoint !== undefined && config.maxTxsPerBlock > config.maxTxsPerCheckpoint) { - log.warn( - `Provided MAX_TX_PER_BLOCK ${config.maxTxsPerBlock} exceeds MAX_TX_PER_CHECKPOINT ${config.maxTxsPerCheckpoint} (capping)`, - ); - maxTxsPerBlock = config.maxTxsPerCheckpoint; - } else { - maxTxsPerBlock = config.maxTxsPerBlock; - } - } else if (config.maxTxsPerCheckpoint !== undefined) { - maxTxsPerBlock = Math.min( - config.maxTxsPerCheckpoint, - Math.ceil((config.maxTxsPerCheckpoint / maxNumberOfBlocks) * multiplier), + let maxTxsPerBlock = config.maxTxsPerBlock; + if ( + maxTxsPerBlock !== undefined && + config.maxTxsPerCheckpoint !== undefined && + maxTxsPerBlock > config.maxTxsPerCheckpoint + ) { + log.warn( + `Provided MAX_TX_PER_BLOCK ${maxTxsPerBlock} exceeds MAX_TX_PER_CHECKPOINT ${config.maxTxsPerCheckpoint} (capping)`, ); - } else { - maxTxsPerBlock = defaultMaxTxsPerBlock; + maxTxsPerBlock = config.maxTxsPerCheckpoint; } - log.info(`Computed block limits L2=${maxL2BlockGas} DA=${maxDABlockGas} maxTxs=${maxTxsPerBlock}`, { - maxL2BlockGas, - maxDABlockGas, - maxTxsPerBlock, - rollupManaLimit, - daCheckpointLimit, - maxNumberOfBlocks, - multiplier, - }); - - return { maxL2BlockGas, maxDABlockGas, maxTxsPerBlock, maxBlocksPerCheckpoint: maxNumberOfBlocks }; + 
return { maxL2BlockGas, maxDABlockGas, maxTxsPerBlock }; } diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index e0ce28583791..ec3d00e9297b 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -119,9 +119,6 @@ export const sequencerConfigMappings: ConfigMappingsType = { 'Redistribute remaining checkpoint budget evenly across remaining blocks instead of allowing a single block to consume the entire remaining budget.', ...booleanConfigHelper(DefaultSequencerConfig.redistributeCheckpointBudget), }, - maxBlocksPerCheckpoint: { - description: 'Computed max number of blocks per checkpoint from timetable.', - }, coinbase: { env: 'COINBASE', parseEnv: (val: string) => (val ? EthAddress.fromString(val) : undefined), diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts index 69ba62b93dc1..bed1f5c8cca4 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts @@ -14,8 +14,8 @@ import type { L2Block, L2BlockSink, L2BlockSource } from '@aztec/stdlib/block'; import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; import { GasFees } from '@aztec/stdlib/gas'; import type { + BlockBuilderOptions, MerkleTreeWriteOperations, - PublicProcessorLimits, ResolvedSequencerConfig, WorldStateSynchronizer, } from '@aztec/stdlib/interfaces/server'; @@ -80,7 +80,7 @@ class TimingAwareMockCheckpointBuilder extends MockCheckpointBuilder { pendingTxs: Iterable | AsyncIterable, blockNumber: BlockNumber, timestamp: bigint, - opts: PublicProcessorLimits, + opts: BlockBuilderOptions, ): Promise { const startTime = this.getSecondsIntoSlot(); diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts 
b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts index 1967bd75eed7..9e24324d937e 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts @@ -34,8 +34,8 @@ import { type Checkpoint, validateCheckpoint } from '@aztec/stdlib/checkpoint'; import { getSlotStartBuildTimestamp } from '@aztec/stdlib/epoch-helpers'; import { Gas } from '@aztec/stdlib/gas'; import { + type BlockBuilderOptions, InsufficientValidTxsError, - type PublicProcessorLimits, type ResolvedSequencerConfig, type WorldStateSynchronizer, } from '@aztec/stdlib/interfaces/server'; @@ -270,7 +270,8 @@ export class CheckpointProposalJob implements Traceable { this.setStateFn(SequencerState.ASSEMBLING_CHECKPOINT, this.slot); const checkpoint = await checkpointBuilder.completeCheckpoint(); - // Final validation round for the checkpoint before we propose it, just for safety + // Final validation: per-block limits are only checked if the operator set them explicitly. + // Otherwise, checkpoint-level budgets were already enforced by the redistribution logic. try { validateCheckpoint(checkpoint, { rollupManaLimit: this.l1Constants.rollupManaLimit, @@ -574,11 +575,11 @@ export class CheckpointProposalJob implements Traceable { ); this.setStateFn(SequencerState.CREATING_BLOCK, this.slot); - // Per-block limits derived at startup by computeBlockLimits(), further capped + // Per-block limits are operator overrides (from SEQ_MAX_L2_BLOCK_GAS etc.) further capped // by remaining checkpoint-level budgets inside CheckpointBuilder before each block is built. // minValidTxs is passed into the builder so it can reject the block *before* updating state. const minValidTxs = forceCreate ? 0 : (this.config.minValidTxsPerBlock ?? 
minTxs); - const blockBuilderOptions: PublicProcessorLimits & { minValidTxs?: number } = { + const blockBuilderOptions: BlockBuilderOptions = { maxTransactions: this.config.maxTxsPerBlock, maxBlockGas: this.config.maxL2BlockGas !== undefined || this.config.maxDABlockGas !== undefined @@ -587,6 +588,8 @@ export class CheckpointProposalJob implements Traceable { deadline: buildDeadline, isBuildingProposal: true, minValidTxs, + maxBlocksPerCheckpoint: this.timetable.maxNumberOfBlocks, + perBlockAllocationMultiplier: this.config.perBlockAllocationMultiplier, }; // Actually build the block by executing txs. The builder throws InsufficientValidTxsError @@ -657,7 +660,7 @@ export class CheckpointProposalJob implements Traceable { pendingTxs: AsyncIterable, blockNumber: BlockNumber, blockTimestamp: bigint, - blockBuilderOptions: PublicProcessorLimits & { minValidTxs?: number }, + blockBuilderOptions: BlockBuilderOptions, ) { try { const workTimer = new Timer(); diff --git a/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts b/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts index 27b6bf911b07..f0a6afca82cc 100644 --- a/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts +++ b/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts @@ -4,11 +4,11 @@ import { unfreeze } from '@aztec/foundation/types'; import { L2Block } from '@aztec/stdlib/block'; import { Checkpoint } from '@aztec/stdlib/checkpoint'; import type { + BlockBuilderOptions, FullNodeBlockBuilderConfig, ICheckpointBlockBuilder, ICheckpointsBuilder, MerkleTreeWriteOperations, - PublicProcessorLimits, } from '@aztec/stdlib/interfaces/server'; import { CheckpointHeader } from '@aztec/stdlib/rollup'; import { makeAppendOnlyTreeSnapshot } from '@aztec/stdlib/testing'; @@ -32,7 +32,7 @@ export class MockCheckpointBuilder implements ICheckpointBlockBuilder { public buildBlockCalls: Array<{ blockNumber: BlockNumber; timestamp: bigint; - opts: PublicProcessorLimits & 
{ minValidTxs?: number }; + opts: BlockBuilderOptions; }> = []; /** Track all consumed transaction hashes across buildBlock calls */ public consumedTxHashes: Set = new Set(); @@ -74,7 +74,7 @@ export class MockCheckpointBuilder implements ICheckpointBlockBuilder { pendingTxs: Iterable | AsyncIterable, blockNumber: BlockNumber, timestamp: bigint, - opts: PublicProcessorLimits & { minValidTxs?: number }, + opts: BlockBuilderOptions, ): Promise { this.buildBlockCalls.push({ blockNumber, timestamp, opts }); diff --git a/yarn-project/slasher/src/watchers/epoch_prune_watcher.test.ts b/yarn-project/slasher/src/watchers/epoch_prune_watcher.test.ts index 7b8b7004db56..aa106b929d17 100644 --- a/yarn-project/slasher/src/watchers/epoch_prune_watcher.test.ts +++ b/yarn-project/slasher/src/watchers/epoch_prune_watcher.test.ts @@ -186,7 +186,7 @@ describe('EpochPruneWatcher', () => { [tx], block.header.globalVariables.blockNumber, block.header.globalVariables.timestamp, - {}, + { isBuildingProposal: false, minValidTxs: 0 }, ); }); @@ -246,7 +246,7 @@ describe('EpochPruneWatcher', () => { [tx], blockFromL1.header.globalVariables.blockNumber, blockFromL1.header.globalVariables.timestamp, - {}, + { isBuildingProposal: false, minValidTxs: 0 }, ); }); }); diff --git a/yarn-project/slasher/src/watchers/epoch_prune_watcher.ts b/yarn-project/slasher/src/watchers/epoch_prune_watcher.ts index 0de0f6b27f65..b0eeb2b80709 100644 --- a/yarn-project/slasher/src/watchers/epoch_prune_watcher.ts +++ b/yarn-project/slasher/src/watchers/epoch_prune_watcher.ts @@ -211,7 +211,10 @@ export class EpochPruneWatcher extends (EventEmitter as new () => WatcherEmitter } const gv = blockFromL1.header.globalVariables; - const { block, failedTxs, numTxs } = await checkpointBuilder.buildBlock(txs, gv.blockNumber, gv.timestamp, {}); + const { block, failedTxs, numTxs } = await checkpointBuilder.buildBlock(txs, gv.blockNumber, gv.timestamp, { + isBuildingProposal: false, + minValidTxs: 0, + }); if (numTxs !== 
txs.length) { // This should be detected by state mismatch, but this makes it easier to debug. diff --git a/yarn-project/stdlib/src/checkpoint/validate.ts b/yarn-project/stdlib/src/checkpoint/validate.ts index 1ceb9fa4c102..4bfceeaf3d59 100644 --- a/yarn-project/stdlib/src/checkpoint/validate.ts +++ b/yarn-project/stdlib/src/checkpoint/validate.ts @@ -36,7 +36,7 @@ export function validateCheckpoint( ): void { validateCheckpointStructure(checkpoint); validateCheckpointLimits(checkpoint, opts); - validateCheckpointBlocksGasLimits(checkpoint, opts); + validateCheckpointBlocksLimits(checkpoint, opts); } /** @@ -125,7 +125,7 @@ export function validateCheckpointStructure(checkpoint: Checkpoint): void { } /** Validates checkpoint blocks gas limits */ -function validateCheckpointBlocksGasLimits( +function validateCheckpointBlocksLimits( checkpoint: Checkpoint, opts: { maxL2BlockGas?: number; diff --git a/yarn-project/stdlib/src/interfaces/block-builder.ts b/yarn-project/stdlib/src/interfaces/block-builder.ts index aa2857072c92..6a2f49bb4209 100644 --- a/yarn-project/stdlib/src/interfaces/block-builder.ts +++ b/yarn-project/stdlib/src/interfaces/block-builder.ts @@ -35,7 +35,8 @@ export interface IBlockFactory extends ProcessedTxHandler { setBlockCompleted(expectedBlockHeader?: BlockHeader): Promise; } -export interface PublicProcessorLimits { +/** Limits passed to the public processor for tx processing within a block. */ +export type PublicProcessorLimits = { /** Maximum number of txs to process. */ maxTransactions?: number; /** L2 and DA gas limits. */ @@ -46,7 +47,30 @@ export interface PublicProcessorLimits { deadline?: Date; /** Whether this processor is building a proposal (as opposed to re-executing one). Skipping txs due to gas or blob limits is only done during proposal building. */ isBuildingProposal?: boolean; -} +}; + +/** Base fields shared by both proposer and validator block builder options. 
*/ +type BlockBuilderOptionsBase = PublicProcessorLimits & { + /** Minimum number of successfully processed txs required. Block is rejected if fewer succeed. */ + minValidTxs: number; +}; + +/** Proposer mode: redistribution params are required. */ +type ProposerBlockBuilderOptions = BlockBuilderOptionsBase & { + isBuildingProposal: true; + /** Maximum number of blocks per checkpoint, derived from the timetable. */ + maxBlocksPerCheckpoint: number; + /** Per-block gas budget multiplier. Budget = (remaining / remainingBlocks) * multiplier. */ + perBlockAllocationMultiplier: number; +}; + +/** Validator mode: no redistribution params needed. */ +type ValidatorBlockBuilderOptions = BlockBuilderOptionsBase & { + isBuildingProposal: false; +}; + +/** Options for building a block within a checkpoint. When proposing, redistribution params are required. */ +export type BlockBuilderOptions = ProposerBlockBuilderOptions | ValidatorBlockBuilderOptions; export interface PublicProcessorValidator { preprocessValidator?: TxValidator; @@ -64,9 +88,6 @@ export type FullNodeBlockBuilderConfig = Pick; export const FullNodeBlockBuilderConfigKeys: (keyof FullNodeBlockBuilderConfig)[] = [ @@ -82,9 +103,6 @@ export const FullNodeBlockBuilderConfigKeys: (keyof FullNodeBlockBuilderConfig)[ 'maxL2BlockGas', 'maxDABlockGas', 'rollupManaLimit', - 'redistributeCheckpointBudget', - 'perBlockAllocationMultiplier', - 'maxBlocksPerCheckpoint', ] as const; /** Thrown when the number of successfully processed transactions is below the required minimum. 
*/ @@ -115,7 +133,7 @@ export interface ICheckpointBlockBuilder { pendingTxs: Iterable | AsyncIterable, blockNumber: BlockNumber, timestamp: bigint, - opts: PublicProcessorLimits & { minValidTxs?: number }, + opts: BlockBuilderOptions, ): Promise; } diff --git a/yarn-project/stdlib/src/interfaces/configs.ts b/yarn-project/stdlib/src/interfaces/configs.ts index b986445a4c6a..55f9142aca33 100644 --- a/yarn-project/stdlib/src/interfaces/configs.ts +++ b/yarn-project/stdlib/src/interfaces/configs.ts @@ -29,8 +29,6 @@ export interface SequencerConfig { perBlockAllocationMultiplier?: number; /** Redistribute remaining checkpoint budget evenly across remaining blocks instead of allowing a single block to consume the entire remaining budget. */ redistributeCheckpointBudget?: boolean; - /** Computed max number of blocks per checkpoint from timetable. */ - maxBlocksPerCheckpoint?: number; /** Recipient of block reward. */ coinbase?: EthAddress; /** Address to receive fees. */ @@ -99,7 +97,6 @@ export const SequencerConfigSchema = zodFor()( maxDABlockGas: z.number().optional(), perBlockAllocationMultiplier: z.number().optional(), redistributeCheckpointBudget: z.boolean().optional(), - maxBlocksPerCheckpoint: z.number().optional(), coinbase: schemas.EthAddress.optional(), feeRecipient: schemas.AztecAddress.optional(), acvmWorkingDirectory: z.string().optional(), @@ -148,9 +145,7 @@ type SequencerConfigOptionalKeys = | 'maxTxsPerCheckpoint' | 'maxL2BlockGas' | 'maxDABlockGas' - | 'perBlockAllocationMultiplier' - | 'redistributeCheckpointBudget' - | 'maxBlocksPerCheckpoint'; + | 'redistributeCheckpointBudget'; export type ResolvedSequencerConfig = Prettify< Required> & Pick diff --git a/yarn-project/validator-client/README.md b/yarn-project/validator-client/README.md index 0974b95f94b1..5f1ca28dcf5e 100644 --- a/yarn-project/validator-client/README.md +++ b/yarn-project/validator-client/README.md @@ -237,13 +237,11 @@ L1 enforces gas and blob capacity per checkpoint. 
The node enforces these during ### Per-block budgets -Per-block budgets prevent one block from consuming the entire checkpoint budget. +Per-block budgets prevent one block from consuming the entire checkpoint budget. The checkpoint builder dynamically computes per-block limits before each block based on the remaining checkpoint budget and the number of remaining blocks. -**Proposer**: `computeBlockLimits()` derives budgets at startup as `min(checkpointLimit, ceil(checkpointLimit / maxBlocks * multiplier))`, where `maxBlocks` comes from the timetable and `multiplier` defaults to 1.2. The multiplier greater than 1 allows early blocks to use more than their even share of the checkpoint budget, since different blocks hit different limit dimensions (L2 gas, DA gas, blob fields) — a strict even split would waste capacity. Operators can override via `SEQ_MAX_L2_BLOCK_GAS` / `SEQ_MAX_DA_BLOCK_GAS` / `SEQ_MAX_TX_PER_BLOCK` (capped at checkpoint limits). Per-block TX limits follow the same derivation pattern when `SEQ_MAX_TX_PER_CHECKPOINT` is set. +**Proposer**: When building a proposal (`isBuildingProposal: true`), the `CheckpointProposalJob` passes `maxBlocksPerCheckpoint` (from the timetable) and `perBlockAllocationMultiplier` (default 1.2) via opts to `CheckpointBuilder.buildBlock`. The builder computes a fair share as `min(perBlockLimit, ceil(remainingBudget / remainingBlocks * multiplier), remainingBudget)`. The multiplier greater than 1 allows early blocks to use more than their even share, since different blocks hit different limit dimensions (L2 gas, DA gas, blob fields) — a strict even split would waste capacity. As prior blocks consume budget, later blocks see tightened limits. This applies to all four dimensions (L2 gas, DA gas, blob fields, transaction count). 
Operators can set hard per-block caps via `SEQ_MAX_L2_BLOCK_GAS` / `SEQ_MAX_DA_BLOCK_GAS` / `SEQ_MAX_TX_PER_BLOCK` (capped at checkpoint limits at startup); these act as additional upper bounds alongside the redistribution. -**Validator**: Optionally enforces per-block limits via `VALIDATOR_MAX_L2_BLOCK_GAS`, `VALIDATOR_MAX_DA_BLOCK_GAS`, and `VALIDATOR_MAX_TX_PER_BLOCK`. When set, these are passed to `buildBlock` during re-execution and to `validateCheckpoint` for final validation. When unset, no per-block limit is enforced for that dimension (checkpoint-level protocol limits still apply). These are independent of the `SEQ_` vars so operators can tune proposer and validation limits separately. - -**Checkpoint-level capping**: `CheckpointBuilder.capLimitsByCheckpointBudgets()` always runs before tx processing, capping per-block limits by the remaining checkpoint budget. When `SEQ_REDISTRIBUTE_CHECKPOINT_BUDGET` is enabled (default: true), the remaining budget is distributed evenly across remaining blocks with the multiplier applied: `min(perBlockLimit, ceil(remainingBudget / remainingBlocks * multiplier), remainingBudget)`. This prevents early blocks from consuming the entire checkpoint budget, producing smoother distribution. When disabled, each block can consume up to the full remaining budget, ie caps by `checkpointBudget - sum(used by prior blocks)`. This applies to all four dimensions (L2 gas, DA gas, blob fields, transaction count). Validators always cap by the total remaining. +**Validator**: When re-executing a proposal (`isBuildingProposal: false`), `capLimitsByCheckpointBudgets` only caps by the per-block limit and the total remaining checkpoint budget — no redistribution or multiplier is applied. This avoids false rejections due to differences between proposer and validator fair-share calculations. Validators can optionally set hard per-block limits via `VALIDATOR_MAX_L2_BLOCK_GAS`, `VALIDATOR_MAX_DA_BLOCK_GAS`, and `VALIDATOR_MAX_TX_PER_BLOCK`. 
When unset, no per-block limit is enforced (checkpoint-level protocol limits still apply). These are independent of the `SEQ_` vars so operators can tune proposer and validation limits separately. ### Per-transaction enforcement @@ -255,12 +253,12 @@ Per-block budgets prevent one block from consuming the entire checkpoint budget. | Variable | Default | Description | | --- | --- | --- | -| `SEQ_MAX_L2_BLOCK_GAS` | *auto* | Per-block L2 gas. Auto-derived from `rollupManaLimit / maxBlocks * multiplier`. | -| `SEQ_MAX_DA_BLOCK_GAS` | *auto* | Per-block DA gas. Auto-derived from checkpoint DA limit / maxBlocks * multiplier. | -| `SEQ_MAX_TX_PER_BLOCK` | *none* | Per-block tx count. If `SEQ_MAX_TX_PER_CHECKPOINT` is set and per-block is not, derived as `ceil(checkpointLimit / maxBlocks * multiplier)`. | -| `SEQ_MAX_TX_PER_CHECKPOINT` | *none* | Total txs across all blocks in a checkpoint. When set, per-block tx limit is derived from it (unless explicitly overridden) and checkpoint-level capping is enforced. | -| `SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER` | 1.2 | Multiplier for per-block budget computation. | -| `SEQ_REDISTRIBUTE_CHECKPOINT_BUDGET` | true | Redistribute remaining checkpoint budget evenly across remaining blocks instead of allowing one block to consume it all. | +| `SEQ_MAX_L2_BLOCK_GAS` | *none* | Hard per-block L2 gas cap. Capped at `rollupManaLimit` at startup. When unset, redistribution dynamically computes per-block limits. | +| `SEQ_MAX_DA_BLOCK_GAS` | *none* | Hard per-block DA gas cap. Capped at `MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT` at startup. When unset, redistribution handles it. | +| `SEQ_MAX_TX_PER_BLOCK` | *none* | Hard per-block tx count cap. Capped at `SEQ_MAX_TX_PER_CHECKPOINT` at startup (if set). | +| `SEQ_MAX_TX_PER_CHECKPOINT` | *none* | Total txs across all blocks in a checkpoint. When set, checkpoint-level capping and redistribution are enforced for tx count. 
| +| `SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER` | 1.2 | Multiplier for per-block budget redistribution. Passed via opts to the checkpoint builder during proposal building. | +| `SEQ_REDISTRIBUTE_CHECKPOINT_BUDGET` | true | Legacy flag; redistribution is now always active during proposal building and inactive during validation. | | `VALIDATOR_MAX_L2_BLOCK_GAS` | *none* | Per-block L2 gas limit for validation. Proposals exceeding this are rejected. | | `VALIDATOR_MAX_DA_BLOCK_GAS` | *none* | Per-block DA gas limit for validation. Proposals exceeding this are rejected. | | `VALIDATOR_MAX_TX_PER_BLOCK` | *none* | Per-block tx count limit for validation. Proposals exceeding this are rejected. | diff --git a/yarn-project/validator-client/src/block_proposal_handler.ts b/yarn-project/validator-client/src/block_proposal_handler.ts index 43c890bdafa8..d38bb7d9332e 100644 --- a/yarn-project/validator-client/src/block_proposal_handler.ts +++ b/yarn-project/validator-client/src/block_proposal_handler.ts @@ -567,6 +567,8 @@ export class BlockProposalHandler { ? new Gas(this.config.validateMaxDABlockGas ?? Infinity, this.config.validateMaxL2BlockGas ?? 
Infinity) : undefined; const result = await checkpointBuilder.buildBlock(txs, blockNumber, blockHeader.globalVariables.timestamp, { + isBuildingProposal: false, + minValidTxs: 0, deadline, expectedEndState: blockHeader.state, maxTransactions: this.config.validateMaxTxsPerBlock, diff --git a/yarn-project/validator-client/src/checkpoint_builder.test.ts b/yarn-project/validator-client/src/checkpoint_builder.test.ts index 9dab4a4778c9..cc48fc07aaf4 100644 --- a/yarn-project/validator-client/src/checkpoint_builder.test.ts +++ b/yarn-project/validator-client/src/checkpoint_builder.test.ts @@ -16,6 +16,7 @@ import { L2Block } from '@aztec/stdlib/block'; import type { ContractDataSource } from '@aztec/stdlib/contract'; import { Gas, GasFees } from '@aztec/stdlib/gas'; import { + type BlockBuilderOptions, type FullNodeBlockBuilderConfig, InsufficientValidTxsError, type MerkleTreeWriteOperations, @@ -62,7 +63,7 @@ describe('CheckpointBuilder', () => { } /** Expose for testing */ - public testCapLimits(opts: PublicProcessorLimits) { + public testCapLimits(opts: BlockBuilderOptions) { return this.capLimitsByCheckpointBudgets(opts); } } @@ -86,7 +87,6 @@ describe('CheckpointBuilder', () => { l1ChainId: 1, rollupVersion: 1, rollupManaLimit: 200_000_000, - redistributeCheckpointBudget: false, ...overrideConfig, }; @@ -100,8 +100,32 @@ describe('CheckpointBuilder', () => { ); } + /** Default opts for validator-mode tests (no redistribution). */ + function validatorOpts(overrides?: Partial & { minValidTxs?: number }): BlockBuilderOptions { + return { ...overrides, isBuildingProposal: false, minValidTxs: overrides?.minValidTxs ?? 0 }; + } + + /** Default opts for proposer-mode tests (with redistribution). 
*/ + function proposerOpts( + overrides?: Partial & { + minValidTxs?: number; + maxBlocksPerCheckpoint?: number; + perBlockAllocationMultiplier?: number; + }, + ): BlockBuilderOptions { + return { + ...overrides, + isBuildingProposal: true, + maxBlocksPerCheckpoint: overrides?.maxBlocksPerCheckpoint ?? 5, + perBlockAllocationMultiplier: overrides?.perBlockAllocationMultiplier ?? 1.2, + minValidTxs: overrides?.minValidTxs ?? 0, + }; + } + beforeEach(() => { - lightweightCheckpointBuilder = mock({ checkpointNumber, constants }); + lightweightCheckpointBuilder = mock(); + Object.defineProperty(lightweightCheckpointBuilder, 'checkpointNumber', { value: checkpointNumber }); + Object.defineProperty(lightweightCheckpointBuilder, 'constants', { value: constants }); lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); fork = mock(); @@ -132,7 +156,7 @@ describe('CheckpointBuilder', () => { [], // debugLogs ]); - const result = await checkpointBuilder.buildBlock([], blockNumber, 1000n); + const result = await checkpointBuilder.buildBlock([], blockNumber, 1000n, validatorOpts()); expect(result.block).toBe(expectedBlock); expect(result.numTxs).toBe(1); @@ -153,7 +177,7 @@ describe('CheckpointBuilder', () => { [], // debugLogs ]); - const result = await checkpointBuilder.buildBlock([], blockNumber, 1000n, { minValidTxs: 0 }); + const result = await checkpointBuilder.buildBlock([], blockNumber, 1000n, validatorOpts({ minValidTxs: 0 })); expect(result.block).toBe(expectedBlock); expect(result.numTxs).toBe(0); @@ -170,9 +194,9 @@ describe('CheckpointBuilder', () => { [], // debugLogs ]); - await expect(checkpointBuilder.buildBlock([], blockNumber, 1000n, { minValidTxs: 1 })).rejects.toThrow( - InsufficientValidTxsError, - ); + await expect( + checkpointBuilder.buildBlock([], blockNumber, 1000n, validatorOpts({ minValidTxs: 1 })), + ).rejects.toThrow(InsufficientValidTxsError); expect(lightweightCheckpointBuilder.addBlock).not.toHaveBeenCalled(); }); @@ -190,7 +214,7 @@ 
describe('CheckpointBuilder', () => { ]); const err = await checkpointBuilder - .buildBlock([], blockNumber, 1000n, { minValidTxs: 2 }) + .buildBlock([], blockNumber, 1000n, validatorOpts({ minValidTxs: 2 })) .catch((e: unknown) => e); expect(err).toBeInstanceOf(InsufficientValidTxsError); @@ -205,14 +229,14 @@ describe('CheckpointBuilder', () => { processor.process.mockResolvedValue([[], [], [], [], []]); - const result = await checkpointBuilder.buildBlock([], blockNumber, 1000n); + const result = await checkpointBuilder.buildBlock([], blockNumber, 1000n, validatorOpts()); expect(result.numTxs).toBe(0); expect(lightweightCheckpointBuilder.addBlock).toHaveBeenCalled(); }); }); - describe('capLimitsByCheckpointBudgets', () => { + describe('capLimitsByCheckpointBudgets (validator mode)', () => { const totalBlobCapacity = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB - NUM_CHECKPOINT_END_MARKER_FIELDS; const firstBlockEndOverhead = getNumBlockEndBlobFields(true); const nonFirstBlockEndOverhead = getNumBlockEndBlobFields(false); @@ -226,8 +250,9 @@ describe('CheckpointBuilder', () => { createMockBlock({ manaUsed: priorManaUsed, txBlobFields: [10], blockBlobFieldCount: 20 }), ]); - const opts: PublicProcessorLimits = { maxBlockGas: new Gas(Infinity, 800_000) }; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits( + validatorOpts({ maxBlockGas: new Gas(Infinity, 800_000) }), + ); // Remaining mana = 1_000_000 - 600_000 = 400_000. Per-block = 800_000. Capped to 400_000. 
expect(capped.maxBlockGas!.l2Gas).toBe(400_000); @@ -242,8 +267,9 @@ describe('CheckpointBuilder', () => { createMockBlock({ manaUsed: priorManaUsed, txBlobFields: [10], blockBlobFieldCount: 20 }), ]); - const opts: PublicProcessorLimits = { maxBlockGas: new Gas(Infinity, 500_000) }; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits( + validatorOpts({ maxBlockGas: new Gas(Infinity, 500_000) }), + ); // Remaining mana = 800_000. Per-block = 500_000. Uses 500_000. expect(capped.maxBlockGas!.l2Gas).toBe(500_000); @@ -256,8 +282,9 @@ describe('CheckpointBuilder', () => { createMockBlock({ manaUsed: 100_000, txBlobFields: [10], blockBlobFieldCount: 20 }), ]); - const opts: PublicProcessorLimits = { maxBlockGas: new Gas(Infinity, 500_000) }; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits( + validatorOpts({ maxBlockGas: new Gas(Infinity, 500_000) }), + ); // Remaining mana = 200_000_000 - 100_000 >> 500_000, so per-block limit is used expect(capped.maxBlockGas!.l2Gas).toBe(500_000); @@ -274,8 +301,9 @@ describe('CheckpointBuilder', () => { ]); const perBlockDAGas = 500_000; - const opts: PublicProcessorLimits = { maxBlockGas: new Gas(perBlockDAGas, Infinity) }; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits( + validatorOpts({ maxBlockGas: new Gas(perBlockDAGas, Infinity) }), + ); // Remaining DA gas = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT - priorDAGas const expectedRemainingDAGas = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT - priorDAGas; @@ -291,8 +319,7 @@ describe('CheckpointBuilder', () => { createMockBlock({ manaUsed: priorManaUsed, txBlobFields: [100], blockBlobFieldCount: 110 }), ]); - const opts: PublicProcessorLimits = {}; - const capped = 
(checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(validatorOpts()); expect(capped.maxBlockGas!.l2Gas).toBe(400_000); expect(capped.maxBlockGas!.daGas).toBe(MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT - 100 * DA_GAS_PER_FIELD); @@ -306,8 +333,9 @@ describe('CheckpointBuilder', () => { createMockBlock({ manaUsed: 0, txBlobFields: [], blockBlobFieldCount }), ]); - const opts: PublicProcessorLimits = { maxBlobFields: 99999 }; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits( + validatorOpts({ maxBlobFields: 99999 }), + ); // Second block: remaining = totalBlobCapacity - 100, minus non-first block end overhead const expectedMaxBlobFields = totalBlobCapacity - blockBlobFieldCount - nonFirstBlockEndOverhead; @@ -319,8 +347,7 @@ describe('CheckpointBuilder', () => { lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); - const opts: PublicProcessorLimits = {}; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(validatorOpts()); // First block: full capacity minus first block end overhead const expectedMaxBlobFields = totalBlobCapacity - firstBlockEndOverhead; @@ -335,8 +362,9 @@ describe('CheckpointBuilder', () => { const block2 = createMockBlock({ manaUsed: 200_000, txBlobFields: [150], blockBlobFieldCount: 160 }); lightweightCheckpointBuilder.getBlocks.mockReturnValue([block1, block2]); - const opts: PublicProcessorLimits = { maxBlockGas: new Gas(Infinity, Infinity) }; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits( + validatorOpts({ maxBlockGas: new Gas(Infinity, Infinity) }), + ); // Remaining mana = 1_000_000 - 300_000 - 200_000 = 500_000 
expect(capped.maxBlockGas!.l2Gas).toBe(500_000); @@ -360,7 +388,7 @@ describe('CheckpointBuilder', () => { const block1 = createMockBlock({ manaUsed: 0, txBlobFields: [], blockBlobFieldCount: block1BlobFieldCount }); lightweightCheckpointBuilder.getBlocks.mockReturnValue([block1]); - const afterOneBlock = (checkpointBuilder as TestCheckpointBuilder).testCapLimits({}); + const afterOneBlock = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(validatorOpts()); const expectedAfterOneBlock = totalBlobCapacity - block1BlobFieldCount - nonFirstBlockEndOverhead; expect(afterOneBlock.maxBlobFields).toBe(expectedAfterOneBlock); @@ -369,7 +397,7 @@ describe('CheckpointBuilder', () => { const block2 = createMockBlock({ manaUsed: 0, txBlobFields: [], blockBlobFieldCount: block2BlobFieldCount }); lightweightCheckpointBuilder.getBlocks.mockReturnValue([block1, block2]); - const afterTwoBlocks = (checkpointBuilder as TestCheckpointBuilder).testCapLimits({}); + const afterTwoBlocks = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(validatorOpts()); const expectedAfterTwoBlocks = totalBlobCapacity - block1BlobFieldCount - block2BlobFieldCount - nonFirstBlockEndOverhead; @@ -388,8 +416,7 @@ describe('CheckpointBuilder', () => { createMockBlock({ manaUsed: 0, txBlobFields: [10, 10, 10], blockBlobFieldCount: 40 }), ]); - const opts: PublicProcessorLimits = { maxTransactions: 15 }; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(validatorOpts({ maxTransactions: 15 })); // Remaining txs = 20 - 3 = 17. Per-block = 15. Capped to min(15, 17) = 15. 
expect(capped.maxTransactions).toBe(15); @@ -404,8 +431,7 @@ describe('CheckpointBuilder', () => { createMockBlock({ manaUsed: 0, txBlobFields: [10, 10, 10, 10], blockBlobFieldCount: 50 }), ]); - const opts: PublicProcessorLimits = { maxTransactions: 5 }; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(validatorOpts({ maxTransactions: 5 })); // Remaining txs = 10 - 8 = 2. Per-block = 5. Capped to min(5, 2) = 2. expect(capped.maxTransactions).toBe(2); @@ -419,8 +445,7 @@ describe('CheckpointBuilder', () => { createMockBlock({ manaUsed: 0, txBlobFields: [10, 10, 10, 10, 10], blockBlobFieldCount: 60 }), ]); - const opts: PublicProcessorLimits = {}; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(validatorOpts()); // Remaining txs = 15 - 5 = 10 expect(capped.maxTransactions).toBe(10); @@ -431,8 +456,7 @@ describe('CheckpointBuilder', () => { lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); - const opts: PublicProcessorLimits = { maxTransactions: 99 }; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(validatorOpts({ maxTransactions: 99 })); // Passthrough: maxTransactions = 99 expect(capped.maxTransactions).toBe(99); @@ -443,50 +467,174 @@ describe('CheckpointBuilder', () => { lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); - const opts: PublicProcessorLimits = {}; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(validatorOpts()); // Neither config nor caller sets it, so it remains undefined expect(capped.maxTransactions).toBeUndefined(); }); - }); - describe('redistributeCheckpointBudget', () => { - it('evenly 
splits budget with multiplier=1', () => { + it('does not apply redistribution multiplier in validator mode', () => { const rollupManaLimit = 1_000_000; - setupBuilder({ - redistributeCheckpointBudget: true, - perBlockAllocationMultiplier: 1, - maxBlocksPerCheckpoint: 5, - rollupManaLimit, - }); + setupBuilder({ rollupManaLimit }); - lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 200_000, txBlobFields: [10], blockBlobFieldCount: 20 }), + ]); - const opts: PublicProcessorLimits = {}; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + // Validator mode should not redistribute — just remaining budget + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(validatorOpts()); - // Fair share = ceil(1_000_000 / 5 * 1) = 200_000 - expect(capped.maxBlockGas!.l2Gas).toBe(200_000); + // No fair share, just remaining budget = 800_000 + expect(capped.maxBlockGas!.l2Gas).toBe(800_000); }); + }); - it('computes fair share with multiplier=1.2, 5 max blocks, 2 existing', () => { - const rollupManaLimit = 1_000_000; - setupBuilder({ - redistributeCheckpointBudget: true, - perBlockAllocationMultiplier: 1.2, - maxBlocksPerCheckpoint: 5, - rollupManaLimit, + describe('multi-block gas redistribution through buildBlock', () => { + // This test exercises the production code path where: + // 1. CheckpointProposalJob passes maxBlocksPerCheckpoint and perBlockAllocationMultiplier via opts + // 2. 
CheckpointBuilder.capLimitsByCheckpointBudgets redistributes remaining budget across remaining blocks + + const rollupManaLimit = 1_000_000; + const maxBlocks = 5; + const multiplier = 1.2; + + // Opts that mimic what CheckpointProposalJob passes: operator per-block gas limit + redistribution params + const staticPerBlockL2Gas = Math.min(rollupManaLimit, Math.ceil((rollupManaLimit / maxBlocks) * multiplier)); + // = min(1_000_000, 240_000) = 240_000 + + const blockBuilderOpts: BlockBuilderOptions = proposerOpts({ + maxBlockGas: new Gas(Infinity, staticPerBlockL2Gas), + maxBlocksPerCheckpoint: maxBlocks, + perBlockAllocationMultiplier: multiplier, + }); + + it('tightens per-block L2 gas limit when prior blocks consumed more than their even share', async () => { + setupBuilder({ rollupManaLimit }); + + // Simulate: blocks 0 and 1 already built, each using 300k mana (above even share of 200k) + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 300_000, txBlobFields: [10], blockBlobFieldCount: 20 }), + createMockBlock({ manaUsed: 300_000, txBlobFields: [10], blockBlobFieldCount: 20 }), + ]); + + const expectedBlock = await L2Block.random(blockNumber); + lightweightCheckpointBuilder.addBlock.mockResolvedValue({ block: expectedBlock, timings: {} }); + processor.process.mockResolvedValue([[{ hash: Fr.random() } as unknown as ProcessedTx], [], [], [], []]); + + // Build block 2 + await checkpointBuilder.buildBlock([], blockNumber, 1000n, blockBuilderOpts); + + // Remaining mana = 1M - 600k = 400k, with 3 blocks remaining (out of 5). 
+ // Expected fair share = ceil(400k / 3 * 1.2) = ceil(160_000) = 160_000 + // Expected cap = min(staticPerBlockL2Gas=240k, fairShare=160k, remaining=400k) = 160_000 + const processCall = processor.process.mock.calls[0]; + const limitsPassedToProcessor = processCall[1] as PublicProcessorLimits; + expect(limitsPassedToProcessor.maxBlockGas!.l2Gas).toBe(160_000); + }); + + it('progressively tightens limits across all blocks in checkpoint', async () => { + setupBuilder({ rollupManaLimit }); + + const expectedBlock = await L2Block.random(blockNumber); + lightweightCheckpointBuilder.addBlock.mockResolvedValue({ block: expectedBlock, timings: {} }); + processor.process.mockResolvedValue([[{ hash: Fr.random() } as unknown as ProcessedTx], [], [], [], []]); + + const capturedL2GasLimits: number[] = []; + + // Build 5 blocks. Each block uses 200k mana (its even share). + for (let i = 0; i < maxBlocks; i++) { + // Set up prior blocks (each used 200k mana) + const priorBlocks = Array.from({ length: i }, () => + createMockBlock({ manaUsed: 200_000, txBlobFields: [10], blockBlobFieldCount: 20 }), + ); + lightweightCheckpointBuilder.getBlocks.mockReturnValue(priorBlocks); + + await checkpointBuilder.buildBlock([], BlockNumber(blockNumber + i), 1000n, blockBuilderOpts); + + const processCall = processor.process.mock.calls[i]; + const limits = processCall[1] as PublicProcessorLimits; + capturedL2GasLimits.push(limits.maxBlockGas!.l2Gas); + } + + // With correct redistribution (5 blocks, each using 200k mana): + // Block 0: remaining=1M, remainingBlocks=5, fairShare=ceil(1M/5*1.2)=240k, cap=min(240k,240k,1M)=240k + // Block 1: remaining=800k, remainingBlocks=4, fairShare=ceil(800k/4*1.2)=240k, cap=min(240k,240k,800k)=240k + // Block 2: remaining=600k, remainingBlocks=3, fairShare=ceil(600k/3*1.2)=240k, cap=min(240k,240k,600k)=240k + // Block 3: remaining=400k, remainingBlocks=2, fairShare=ceil(400k/2*1.2)=240k, cap=min(240k,240k,400k)=240k + // Block 4: remaining=200k, 
remainingBlocks=1, fairShare=ceil(200k/1*1.2)=240k, cap=min(240k,240k,200k)=200k + expect(capturedL2GasLimits).toEqual([240_000, 240_000, 240_000, 240_000, 200_000]); + }); + + it('prevents block starvation when early blocks are heavy', async () => { + setupBuilder({ rollupManaLimit }); + + const expectedBlock = await L2Block.random(blockNumber); + lightweightCheckpointBuilder.addBlock.mockResolvedValue({ block: expectedBlock, timings: {} }); + processor.process.mockResolvedValue([[{ hash: Fr.random() } as unknown as ProcessedTx], [], [], [], []]); + + const capturedL2GasLimits: number[] = []; + + // Build 5 blocks. First 2 blocks use 300k each (heavy), rest use whatever they get. + const manaUsedPerBlock = [300_000, 300_000, 0, 0, 0]; // only first 2 are "used" as prior blocks + + for (let i = 0; i < maxBlocks; i++) { + const priorBlocks = Array.from({ length: i }, (_, j) => + createMockBlock({ manaUsed: manaUsedPerBlock[j], txBlobFields: [10], blockBlobFieldCount: 20 }), + ); + lightweightCheckpointBuilder.getBlocks.mockReturnValue(priorBlocks); + + await checkpointBuilder.buildBlock([], BlockNumber(blockNumber + i), 1000n, blockBuilderOpts); + + const processCall = processor.process.mock.calls[i]; + const limits = processCall[1] as PublicProcessorLimits; + capturedL2GasLimits.push(limits.maxBlockGas!.l2Gas); + } + + // With correct redistribution and heavy early blocks (300k each): + // Block 0: remaining=1M, remainingBlocks=5, fairShare=ceil(1M/5*1.2)=240k, cap=min(240k,240k,1M)=240k + // Block 1: remaining=700k, remainingBlocks=4, fairShare=ceil(700k/4*1.2)=210k, cap=min(240k,210k,700k)=210k + // Block 2: remaining=400k, remainingBlocks=3, fairShare=ceil(400k/3*1.2)=160k, cap=min(240k,160k,400k)=160k + // Block 3: remaining=400k, remainingBlocks=2, fairShare=ceil(400k/2*1.2)=240k, cap=min(240k,240k,400k)=240k + // Block 4: remaining=400k, remainingBlocks=1, fairShare=ceil(400k/1*1.2)=480k, cap=min(240k,480k,400k)=240k + 
expect(capturedL2GasLimits[0]).toBe(240_000); // Block 0: full fair share + expect(capturedL2GasLimits[1]).toBe(210_000); // Block 1: tightened by redistribution + expect(capturedL2GasLimits[2]).toBe(160_000); // Block 2: tightened further + expect(capturedL2GasLimits[3]).toBe(240_000); // Block 3: relaxed (blocks 2-3 used nothing) + expect(capturedL2GasLimits[4]).toBe(240_000); // Block 4: still has plenty of budget + }); + + it('explicit per-block limit wins over redistribution when tighter', async () => { + setupBuilder({ rollupManaLimit }); + + const expectedBlock = await L2Block.random(blockNumber); + lightweightCheckpointBuilder.addBlock.mockResolvedValue({ block: expectedBlock, timings: {} }); + processor.process.mockResolvedValue([[{ hash: Fr.random() } as unknown as ProcessedTx], [], [], [], []]); + + // Explicit per-block limit (100k) is TIGHTER than redistribution. + // No prior blocks: remaining=1M, 5 remaining, fairShare=ceil(1M/5*1.2)=240k. + // cap = min(100k, 240k, 1M) = 100k — explicit wins. 
+ lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); + await checkpointBuilder.buildBlock([], blockNumber, 1000n, { + ...blockBuilderOpts, + maxBlockGas: new Gas(Infinity, 100_000), }); + expect((processor.process.mock.calls[0][1] as PublicProcessorLimits).maxBlockGas!.l2Gas).toBe(100_000); + }); + }); + + describe('proposer redistribution via opts', () => { + it('computes fair share with multiplier across remaining blocks', () => { + const rollupManaLimit = 1_000_000; + setupBuilder({ rollupManaLimit }); + // 2 existing blocks used 400_000 mana total lightweightCheckpointBuilder.getBlocks.mockReturnValue([ createMockBlock({ manaUsed: 200_000, txBlobFields: [10], blockBlobFieldCount: 20 }), createMockBlock({ manaUsed: 200_000, txBlobFields: [10], blockBlobFieldCount: 20 }), ]); - const opts: PublicProcessorLimits = {}; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(proposerOpts()); // remainingMana = 600_000, remainingBlocks = 3, multiplier = 1.2 // fairShare = ceil(600_000 / 3 * 1.2) = ceil(240_000) = 240_000 @@ -495,12 +643,7 @@ describe('CheckpointBuilder', () => { it('gives all remaining budget to last block (remainingBlocks=1)', () => { const rollupManaLimit = 1_000_000; - setupBuilder({ - redistributeCheckpointBudget: true, - perBlockAllocationMultiplier: 1.2, - maxBlocksPerCheckpoint: 3, - rollupManaLimit, - }); + setupBuilder({ rollupManaLimit }); // 2 existing blocks used 800_000 total lightweightCheckpointBuilder.getBlocks.mockReturnValue([ @@ -508,44 +651,23 @@ describe('CheckpointBuilder', () => { createMockBlock({ manaUsed: 400_000, txBlobFields: [10], blockBlobFieldCount: 20 }), ]); - const opts: PublicProcessorLimits = {}; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits( + proposerOpts({ maxBlocksPerCheckpoint: 3 }), + ); 
// remainingMana = 200_000, remainingBlocks = 1, multiplier = 1.2 // fairShare = ceil(200_000 / 1 * 1.2) = 240_000. min(200_000, 240_000, 200_000) = 200_000 expect(capped.maxBlockGas!.l2Gas).toBe(200_000); }); - it('uses old behavior when redistributeCheckpointBudget is false', () => { - const rollupManaLimit = 1_000_000; - setupBuilder({ - redistributeCheckpointBudget: false, - maxBlocksPerCheckpoint: 5, - rollupManaLimit, - }); - - lightweightCheckpointBuilder.getBlocks.mockReturnValue([ - createMockBlock({ manaUsed: 200_000, txBlobFields: [10], blockBlobFieldCount: 20 }), - ]); - - const opts: PublicProcessorLimits = {}; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); - - // Old behavior: no fair share, just remaining budget = 800_000 - expect(capped.maxBlockGas!.l2Gas).toBe(800_000); - }); - it('redistributes DA gas across remaining blocks', () => { - setupBuilder({ - redistributeCheckpointBudget: true, - perBlockAllocationMultiplier: 1, - maxBlocksPerCheckpoint: 4, - }); + setupBuilder(); lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); - const opts: PublicProcessorLimits = {}; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits( + proposerOpts({ maxBlocksPerCheckpoint: 4, perBlockAllocationMultiplier: 1 }), + ); // fairShareDA = ceil(MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT / 4 * 1) const expectedDA = Math.ceil(MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT / 4); @@ -553,20 +675,16 @@ describe('CheckpointBuilder', () => { }); it('redistributes tx count across remaining blocks', () => { - setupBuilder({ - redistributeCheckpointBudget: true, - perBlockAllocationMultiplier: 1, - maxBlocksPerCheckpoint: 4, - maxTxsPerCheckpoint: 100, - }); + setupBuilder({ maxTxsPerCheckpoint: 100 }); // 1 existing block with 10 txs lightweightCheckpointBuilder.getBlocks.mockReturnValue([ createMockBlock({ manaUsed: 0, txBlobFields: new 
Array(10).fill(1), blockBlobFieldCount: 20 }), ]); - const opts: PublicProcessorLimits = {}; - const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits( + proposerOpts({ maxBlocksPerCheckpoint: 4, perBlockAllocationMultiplier: 1 }), + ); // remainingTxs = 90, remainingBlocks = 3, multiplier = 1 // fairShareTxs = ceil(90 / 3 * 1) = 30 diff --git a/yarn-project/validator-client/src/checkpoint_builder.ts b/yarn-project/validator-client/src/checkpoint_builder.ts index 906bcfe7da98..05bd83e5223a 100644 --- a/yarn-project/validator-client/src/checkpoint_builder.ts +++ b/yarn-project/validator-client/src/checkpoint_builder.ts @@ -20,6 +20,7 @@ import type { ContractDataSource } from '@aztec/stdlib/contract'; import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; import { Gas } from '@aztec/stdlib/gas'; import { + type BlockBuilderOptions, type BuildBlockInCheckpointResult, type FullNodeBlockBuilderConfig, FullNodeBlockBuilderConfigKeys, @@ -74,7 +75,7 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { pendingTxs: Iterable | AsyncIterable, blockNumber: BlockNumber, timestamp: bigint, - opts: PublicProcessorLimits & { expectedEndState?: StateReference; minValidTxs?: number } = {}, + opts: BlockBuilderOptions & { expectedEndState?: StateReference }, ): Promise { const slot = this.checkpointBuilder.constants.slotNumber; @@ -167,11 +168,12 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { /** * Caps per-block gas and blob field limits by remaining checkpoint-level budgets. - * Computes remaining L2 gas (mana), DA gas, and blob fields from blocks already added to the checkpoint, - * then returns opts with maxBlockGas and maxBlobFields capped accordingly. + * When building a proposal (isBuildingProposal=true), computes a fair share of remaining budget + * across remaining blocks scaled by the multiplier. 
When validating, only caps by per-block limit + * and remaining checkpoint budget (no redistribution or multiplier). */ protected capLimitsByCheckpointBudgets( - opts: PublicProcessorLimits, + opts: BlockBuilderOptions, ): Pick { const existingBlocks = this.checkpointBuilder.getBlocks(); @@ -192,39 +194,31 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { const blockEndOverhead = getNumBlockEndBlobFields(isFirstBlock); const maxBlobFieldsForTxs = totalBlobCapacity - usedBlobFields - blockEndOverhead; - // When redistributeCheckpointBudget is enabled (default), compute a fair share of remaining budget - // across remaining blocks scaled by the multiplier, instead of letting one block consume it all. - const redistribute = this.config.redistributeCheckpointBudget !== false; - const remainingBlocks = Math.max(1, (this.config.maxBlocksPerCheckpoint ?? 1) - existingBlocks.length); - const multiplier = this.config.perBlockAllocationMultiplier ?? 1.2; - - // Cap L2 gas by remaining checkpoint mana (with fair share when redistributing) - const fairShareL2 = redistribute ? Math.ceil((remainingMana / remainingBlocks) * multiplier) : Infinity; - const cappedL2Gas = Math.min(opts.maxBlockGas?.l2Gas ?? Infinity, fairShareL2, remainingMana); - - // Cap DA gas by remaining checkpoint DA gas budget (with fair share when redistributing) - const fairShareDA = redistribute ? Math.ceil((remainingDAGas / remainingBlocks) * multiplier) : Infinity; - const cappedDAGas = Math.min(opts.maxBlockGas?.daGas ?? remainingDAGas, fairShareDA, remainingDAGas); - - // Cap blob fields by remaining checkpoint blob capacity (with fair share when redistributing) - const fairShareBlobs = redistribute ? Math.ceil((maxBlobFieldsForTxs / remainingBlocks) * multiplier) : Infinity; - const cappedBlobFields = Math.min(opts.maxBlobFields ?? 
Infinity, fairShareBlobs, maxBlobFieldsForTxs); - - // Cap transaction count by remaining checkpoint tx budget (with fair share when redistributing) - let cappedMaxTransactions: number | undefined; - if (this.config.maxTxsPerCheckpoint !== undefined) { - const usedTxs = sum(existingBlocks.map(b => b.body.txEffects.length)); - const remainingTxs = Math.max(0, this.config.maxTxsPerCheckpoint - usedTxs); - const fairShareTxs = redistribute ? Math.ceil((remainingTxs / remainingBlocks) * multiplier) : Infinity; - cappedMaxTransactions = Math.min(opts.maxTransactions ?? Infinity, fairShareTxs, remainingTxs); - } else { - cappedMaxTransactions = opts.maxTransactions; + // Remaining txs + const usedTxs = sum(existingBlocks.map(b => b.body.txEffects.length)); + const remainingTxs = Math.max(0, (this.config.maxTxsPerCheckpoint ?? Infinity) - usedTxs); + + // Cap by per-block limit + remaining checkpoint budget + let cappedL2Gas = Math.min(opts.maxBlockGas?.l2Gas ?? Infinity, remainingMana); + let cappedDAGas = Math.min(opts.maxBlockGas?.daGas ?? Infinity, remainingDAGas); + let cappedBlobFields = Math.min(opts.maxBlobFields ?? Infinity, maxBlobFieldsForTxs); + let cappedMaxTransactions = Math.min(opts.maxTransactions ?? 
Infinity, remainingTxs); + + // Proposer mode: further cap by fair share of remaining budget across remaining blocks + if (opts.isBuildingProposal) { + const remainingBlocks = Math.max(1, opts.maxBlocksPerCheckpoint - existingBlocks.length); + const multiplier = opts.perBlockAllocationMultiplier; + + cappedL2Gas = Math.min(cappedL2Gas, Math.ceil((remainingMana / remainingBlocks) * multiplier)); + cappedDAGas = Math.min(cappedDAGas, Math.ceil((remainingDAGas / remainingBlocks) * multiplier)); + cappedBlobFields = Math.min(cappedBlobFields, Math.ceil((maxBlobFieldsForTxs / remainingBlocks) * multiplier)); + cappedMaxTransactions = Math.min(cappedMaxTransactions, Math.ceil((remainingTxs / remainingBlocks) * multiplier)); } return { maxBlockGas: new Gas(cappedDAGas, cappedL2Gas), maxBlobFields: cappedBlobFields, - maxTransactions: cappedMaxTransactions, + maxTransactions: Number.isFinite(cappedMaxTransactions) ? cappedMaxTransactions : undefined, }; } diff --git a/yarn-project/validator-client/src/validator.integration.test.ts b/yarn-project/validator-client/src/validator.integration.test.ts index 19ad2e1e40f6..5c3c16662f57 100644 --- a/yarn-project/validator-client/src/validator.integration.test.ts +++ b/yarn-project/validator-client/src/validator.integration.test.ts @@ -212,7 +212,12 @@ describe('ValidatorClient Integration', () => { l1ToL2Messages: Fr[] = [], ): Promise<{ block: L2Block; proposal: BlockProposal }> => { const inHash = computeInHashFromL1ToL2Messages(l1ToL2Messages); - const { block, usedTxs } = await checkpointBuilder.buildBlock(txs, blockNumber, timestamp, {}); + const { block, usedTxs } = await checkpointBuilder.buildBlock(txs, blockNumber, timestamp, { + isBuildingProposal: true, + maxBlocksPerCheckpoint: 1, + perBlockAllocationMultiplier: 1.2, + minValidTxs: 0, + }); const proposal = await proposer.validator.createBlockProposal( block.header,