diff --git a/yarn-project/archiver/src/archiver.ts b/yarn-project/archiver/src/archiver.ts index de82a0482186..28c0cfa720ab 100644 --- a/yarn-project/archiver/src/archiver.ts +++ b/yarn-project/archiver/src/archiver.ts @@ -120,7 +120,11 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra }, private readonly blobClient: BlobClientInterface, instrumentation: ArchiverInstrumentation, - protected override readonly l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr }, + protected override readonly l1Constants: L1RollupConstants & { + l1StartBlockHash: Buffer32; + genesisArchiveRoot: Fr; + rollupManaLimit?: number; + }, synchronizer: ArchiverL1Synchronizer, events: ArchiverEmitter, l2TipsCache?: L2TipsCache, @@ -133,7 +137,9 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra this.synchronizer = synchronizer; this.events = events; this.l2TipsCache = l2TipsCache ?? new L2TipsCache(this.dataStore.blockStore); - this.updater = new ArchiverDataStoreUpdater(this.dataStore, this.l2TipsCache); + this.updater = new ArchiverDataStoreUpdater(this.dataStore, this.l2TipsCache, { + rollupManaLimit: l1Constants.rollupManaLimit, + }); // Running promise starts with a small interval inbetween runs, so all iterations needed for the initial sync // are done as fast as possible. This then gets updated once the initial sync completes. 
diff --git a/yarn-project/archiver/src/factory.ts b/yarn-project/archiver/src/factory.ts index ca4d60f8a780..f7f2d46b44db 100644 --- a/yarn-project/archiver/src/factory.ts +++ b/yarn-project/archiver/src/factory.ts @@ -85,6 +85,7 @@ export async function createArchiver( genesisArchiveRoot, slashingProposerAddress, targetCommitteeSize, + rollupManaLimit, ] = await Promise.all([ rollup.getL1StartBlock(), rollup.getL1GenesisTime(), @@ -92,6 +93,7 @@ export async function createArchiver( rollup.getGenesisArchiveTreeRoot(), rollup.getSlashingProposerAddress(), rollup.getTargetCommitteeSize(), + rollup.getManaLimit(), ] as const); const l1StartBlockHash = await publicClient @@ -110,6 +112,7 @@ export async function createArchiver( proofSubmissionEpochs: Number(proofSubmissionEpochs), targetCommitteeSize, genesisArchiveRoot: Fr.fromString(genesisArchiveRoot.toString()), + rollupManaLimit: Number(rollupManaLimit), }; const archiverConfig = merge( diff --git a/yarn-project/archiver/src/modules/data_store_updater.test.ts b/yarn-project/archiver/src/modules/data_store_updater.test.ts index e261b76faab9..94721e4c22ea 100644 --- a/yarn-project/archiver/src/modules/data_store_updater.test.ts +++ b/yarn-project/archiver/src/modules/data_store_updater.test.ts @@ -5,9 +5,7 @@ import { ContractClassPublishedEvent } from '@aztec/protocol-contracts/class-reg import { ContractInstancePublishedEvent } from '@aztec/protocol-contracts/instance-registry'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { L2Block } from '@aztec/stdlib/block'; -import { Checkpoint } from '@aztec/stdlib/checkpoint'; import { ContractClassLog, PrivateLog } from '@aztec/stdlib/logs'; -import { CheckpointHeader } from '@aztec/stdlib/rollup'; import '@aztec/stdlib/testing/jest'; import { readFileSync } from 'fs'; @@ -15,7 +13,7 @@ import { dirname, resolve } from 'path'; import { fileURLToPath } from 'url'; import { KVArchiverDataStore } from '../store/kv_archiver_store.js'; -import { 
makePublishedCheckpoint } from '../test/mock_structs.js'; +import { makeCheckpoint, makePublishedCheckpoint } from '../test/mock_structs.js'; import { ArchiverDataStoreUpdater } from './data_store_updater.js'; /** Loads the sample ContractClassPublished event payload from protocol-contracts fixtures. */ @@ -110,12 +108,7 @@ describe('ArchiverDataStoreUpdater', () => { // Make sure it has a different archive root (which it will by default from random) expect(conflictingBlock.archive.root.equals(localBlock.archive.root)).toBe(false); - const checkpointWithConflict = new Checkpoint( - conflictingBlock.archive, - CheckpointHeader.random({ slotNumber: SlotNumber(100) }), - [conflictingBlock], - CheckpointNumber(1), - ); + const checkpointWithConflict = makeCheckpoint([conflictingBlock]); const publishedCheckpoint = makePublishedCheckpoint(checkpointWithConflict, 10); // This should detect the conflict and prune the local block @@ -135,8 +128,7 @@ describe('ArchiverDataStoreUpdater', () => { block.body.txEffects[0].contractClassLogs = [contractClassLog]; block.body.txEffects[0].privateLogs = [PrivateLog.fromBuffer(getSampleContractInstancePublishedEventPayload())]; - const checkpoint = new Checkpoint(block.archive, CheckpointHeader.random(), [block], CheckpointNumber(1)); - const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10); + const publishedCheckpoint = makePublishedCheckpoint(makeCheckpoint([block]), 10); await updater.addCheckpoints([publishedCheckpoint]); @@ -166,8 +158,7 @@ describe('ArchiverDataStoreUpdater', () => { await updater.addProposedBlocks([block]); // Create checkpoint with the SAME block (same archive root) - const checkpoint = new Checkpoint(block.archive, CheckpointHeader.random(), [block], CheckpointNumber(1)); - const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10); + const publishedCheckpoint = makePublishedCheckpoint(makeCheckpoint([block]), 10); await updater.addCheckpoints([publishedCheckpoint]); @@ -196,13 +187,7 
@@ describe('ArchiverDataStoreUpdater', () => { }); expect(checkpointBlock.archive.root.equals(localBlock.archive.root)).toBe(false); - const checkpoint = new Checkpoint( - checkpointBlock.archive, - CheckpointHeader.random({ slotNumber: SlotNumber(100) }), - [checkpointBlock], - CheckpointNumber(1), - ); - await updater.addCheckpoints([makePublishedCheckpoint(checkpoint, 10)]); + await updater.addCheckpoints([makePublishedCheckpoint(makeCheckpoint([checkpointBlock]), 10)]); // Verify checkpoint block is stored const storedBlock = await store.getBlock(BlockNumber(1)); diff --git a/yarn-project/archiver/src/modules/data_store_updater.ts b/yarn-project/archiver/src/modules/data_store_updater.ts index dd2e6becd57a..83864240f01d 100644 --- a/yarn-project/archiver/src/modules/data_store_updater.ts +++ b/yarn-project/archiver/src/modules/data_store_updater.ts @@ -11,7 +11,7 @@ import { ContractInstanceUpdatedEvent, } from '@aztec/protocol-contracts/instance-registry'; import type { L2Block, ValidateCheckpointResult } from '@aztec/stdlib/block'; -import type { PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; +import { type PublishedCheckpoint, validateCheckpoint } from '@aztec/stdlib/checkpoint'; import { type ExecutablePrivateFunctionWithMembershipProof, type UtilityFunctionWithMembershipProof, @@ -48,6 +48,7 @@ export class ArchiverDataStoreUpdater { constructor( private store: KVArchiverDataStore, private l2TipsCache?: L2TipsCache, + private opts: { rollupManaLimit?: number } = {}, ) {} /** @@ -97,6 +98,10 @@ export class ArchiverDataStoreUpdater { checkpoints: PublishedCheckpoint[], pendingChainValidationStatus?: ValidateCheckpointResult, ): Promise { + for (const checkpoint of checkpoints) { + validateCheckpoint(checkpoint.checkpoint, { rollupManaLimit: this.opts?.rollupManaLimit }); + } + const result = await this.store.transactionAsync(async () => { // Before adding checkpoints, check for conflicts with local blocks if any const { prunedBlocks, 
lastAlreadyInsertedBlockNumber } = await this.pruneMismatchingLocalBlocks(checkpoints); diff --git a/yarn-project/archiver/src/modules/l1_synchronizer.ts b/yarn-project/archiver/src/modules/l1_synchronizer.ts index ae4bca9dc898..5f75863f98db 100644 --- a/yarn-project/archiver/src/modules/l1_synchronizer.ts +++ b/yarn-project/archiver/src/modules/l1_synchronizer.ts @@ -69,13 +69,19 @@ export class ArchiverL1Synchronizer implements Traceable { private readonly epochCache: EpochCache, private readonly dateProvider: DateProvider, private readonly instrumentation: ArchiverInstrumentation, - private readonly l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr }, + private readonly l1Constants: L1RollupConstants & { + l1StartBlockHash: Buffer32; + genesisArchiveRoot: Fr; + rollupManaLimit?: number; + }, private readonly events: ArchiverEmitter, tracer: Tracer, l2TipsCache?: L2TipsCache, private readonly log: Logger = createLogger('archiver:l1-sync'), ) { - this.updater = new ArchiverDataStoreUpdater(this.store, l2TipsCache); + this.updater = new ArchiverDataStoreUpdater(this.store, l2TipsCache, { + rollupManaLimit: l1Constants.rollupManaLimit, + }); this.tracer = tracer; } diff --git a/yarn-project/archiver/src/test/mock_structs.ts b/yarn-project/archiver/src/test/mock_structs.ts index 974141601f5b..0888d717218c 100644 --- a/yarn-project/archiver/src/test/mock_structs.ts +++ b/yarn-project/archiver/src/test/mock_structs.ts @@ -127,6 +127,25 @@ export function makeL1PublishedData(l1BlockNumber: number): L1PublishedData { return new L1PublishedData(BigInt(l1BlockNumber), BigInt(l1BlockNumber * 1000), makeBlockHash(l1BlockNumber)); } +/** Creates a Checkpoint from a list of blocks with a header that matches the blocks' structure. 
*/ +export function makeCheckpoint(blocks: L2Block[], checkpointNumber = CheckpointNumber(1)): Checkpoint { + const firstBlock = blocks[0]; + const { slotNumber, timestamp, coinbase, feeRecipient, gasFees } = firstBlock.header.globalVariables; + return new Checkpoint( + blocks.at(-1)!.archive, + CheckpointHeader.random({ + lastArchiveRoot: firstBlock.header.lastArchive.root, + slotNumber, + timestamp, + coinbase, + feeRecipient, + gasFees, + }), + blocks, + checkpointNumber, + ); +} + /** Wraps a Checkpoint with L1 published data and random attestations. */ export function makePublishedCheckpoint( checkpoint: Checkpoint, @@ -301,11 +320,6 @@ export async function makeCheckpointWithLogs( return txEffect; }); - const checkpoint = new Checkpoint( - AppendOnlyTreeSnapshot.random(), - CheckpointHeader.random(), - [block], - CheckpointNumber.fromBlockNumber(BlockNumber(blockNumber)), - ); + const checkpoint = makeCheckpoint([block], CheckpointNumber.fromBlockNumber(BlockNumber(blockNumber))); return makePublishedCheckpoint(checkpoint, blockNumber); } diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index a00a1879c1a3..566c831c9abf 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -202,6 +202,7 @@ export type EnvVar = | 'SENTINEL_HISTORY_LENGTH_IN_EPOCHS' | 'SENTINEL_HISTORIC_PROVEN_PERFORMANCE_LENGTH_IN_EPOCHS' | 'SEQ_MAX_TX_PER_BLOCK' + | 'SEQ_MAX_TX_PER_CHECKPOINT' | 'SEQ_MIN_TX_PER_BLOCK' | 'SEQ_PUBLISH_TXS_WITH_PROPOSALS' | 'SEQ_MAX_DA_BLOCK_GAS' diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts index 9bc2e2888864..ea093cd2ab2e 100644 --- a/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts +++ 
b/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts @@ -61,4 +61,70 @@ describe('CheckpointProposalValidator', () => { getTxs: () => [], epochCacheMock: () => mock(), }); + + describe('maxTxsPerBlock validation', () => { + const currentSlot = SlotNumber(100); + const nextSlot = SlotNumber(101); + let epochCache: ReturnType>; + + function setupEpochCache(proposerAddress: EthAddress) { + epochCache = mock(); + epochCache.getCurrentAndNextSlot.mockReturnValue({ currentSlot, nextSlot }); + epochCache.getProposerAttesterAddressInSlot.mockResolvedValue(proposerAddress); + } + + it('rejects checkpoint proposal when last block txHashes exceed maxTxsPerBlock', async () => { + const signer = Secp256k1Signer.random(); + setupEpochCache(signer.address); + const validator = new CheckpointProposalValidator(epochCache, { txsPermitted: true, maxTxsPerBlock: 2 }); + + const header = makeCheckpointHeader(0, { slotNumber: currentSlot }); + const proposal = await makeCheckpointProposalAdapter({ + blockHeader: header, + lastBlockHeader: header, + signer, + txHashes: Array.from({ length: 3 }, () => TxHash.random()), + }); + + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'reject', severity: expect.anything() }); + }); + + it('accepts checkpoint proposal when last block txHashes are within maxTxsPerBlock', async () => { + const signer = Secp256k1Signer.random(); + setupEpochCache(signer.address); + const validator = new CheckpointProposalValidator(epochCache, { txsPermitted: true, maxTxsPerBlock: 5 }); + + const header = makeCheckpointHeader(0, { slotNumber: currentSlot }); + const proposal = await makeCheckpointProposalAdapter({ + blockHeader: header, + lastBlockHeader: header, + signer, + txHashes: Array.from({ length: 3 }, () => TxHash.random()), + }); + + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'accept' }); + }); + + it('skips maxTxsPerBlock check when not 
configured', async () => { + const signer = Secp256k1Signer.random(); + setupEpochCache(signer.address); + const validator = new CheckpointProposalValidator(epochCache, { + txsPermitted: true, + maxTxsPerBlock: undefined, + }); + + const header = makeCheckpointHeader(0, { slotNumber: currentSlot }); + const proposal = await makeCheckpointProposalAdapter({ + blockHeader: header, + lastBlockHeader: header, + signer, + txHashes: Array.from({ length: 100 }, () => TxHash.random()), + }); + + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'accept' }); + }); + }); }); diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator_test_suite.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator_test_suite.ts index e58a007a3de7..ec12ec3442f6 100644 --- a/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator_test_suite.ts +++ b/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator_test_suite.ts @@ -58,7 +58,7 @@ export function sharedProposalValidatorTests { epochCache = epochCacheMock(); - validator = validatorFactory(epochCache, { txsPermitted: true }); + validator = validatorFactory(epochCache, { txsPermitted: true, maxTxsPerBlock: undefined }); epochCache.getCurrentAndNextSlot.mockReturnValue({ currentSlot: currentSlot, nextSlot: nextSlot, @@ -231,7 +231,10 @@ export function sharedProposalValidatorTests { it('returns mid tolerance error if txs not permitted and proposal contains txHashes', async () => { const currentProposer = getSigner(); - const validatorWithTxsDisabled = validatorFactory(epochCache, { txsPermitted: false }); + const validatorWithTxsDisabled = validatorFactory(epochCache, { + txsPermitted: false, + maxTxsPerBlock: undefined, + }); const header = makeHeader(1, 100, 100); const mockProposal = await makeProposal({ blockHeader: header, @@ -247,7 +250,10 @@ export function sharedProposalValidatorTests { const currentProposer = 
getSigner(); - const validatorWithTxsDisabled = validatorFactory(epochCache, { txsPermitted: false }); + const validatorWithTxsDisabled = validatorFactory(epochCache, { + txsPermitted: false, + maxTxsPerBlock: undefined, + }); const header = makeHeader(1, 100, 100); const mockProposal = await makeProposal({ blockHeader: header, diff --git a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts index 8da82a7d195b..783ce04e76dc 100644 --- a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts +++ b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts @@ -222,14 +222,12 @@ export class LibP2PService extends WithTracer implements P2PService { this.protocolVersion, ); - this.blockProposalValidator = new BlockProposalValidator(epochCache, { + const proposalValidatorOpts = { txsPermitted: !config.disableTransactions, maxTxsPerBlock: config.maxTxsPerBlock, - }); - this.checkpointProposalValidator = new CheckpointProposalValidator(epochCache, { - txsPermitted: !config.disableTransactions, - maxTxsPerBlock: config.maxTxsPerBlock, - }); + }; + this.blockProposalValidator = new BlockProposalValidator(epochCache, proposalValidatorOpts); + this.checkpointProposalValidator = new CheckpointProposalValidator(epochCache, proposalValidatorOpts); this.checkpointAttestationValidator = config.fishermanMode ? 
new FishermanAttestationValidator(epochCache, mempools.attestationPool, telemetry) : new CheckpointAttestationValidator(epochCache); diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.test.ts b/yarn-project/sequencer-client/src/client/sequencer-client.test.ts new file mode 100644 index 000000000000..e325cefca47d --- /dev/null +++ b/yarn-project/sequencer-client/src/client/sequencer-client.test.ts @@ -0,0 +1,110 @@ +import { MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT } from '@aztec/constants'; +import { createLogger } from '@aztec/foundation/log'; + +import type { SequencerClientConfig } from '../config.js'; +import { computeBlockLimits } from './sequencer-client.js'; + +describe('computeBlockLimits', () => { + const log = createLogger('test'); + + /** Builds a minimal config with only the fields needed by computeBlockLimits. */ + function makeConfig(overrides: Partial = {}): SequencerClientConfig { + return { + ethereumSlotDuration: 12, + aztecSlotDuration: 72, + attestationPropagationTime: 3, + enforceTimeTable: true, + // No blockDurationMs -> single block mode -> maxNumberOfBlocks = 1 + ...overrides, + } as SequencerClientConfig; + } + + describe('L2 gas', () => { + it('derives maxL2BlockGas from rollupManaLimit when not explicitly set', () => { + const rollupManaLimit = 1_000_000; + // Single block mode (maxNumberOfBlocks=1), default multiplier=2: + // min(1_000_000, ceil(1_000_000 / 1 * 2)) = min(1_000_000, 2_000_000) = 1_000_000 + const result = computeBlockLimits(makeConfig(), rollupManaLimit, 12, log); + expect(result.maxL2BlockGas).toBe(rollupManaLimit); + }); + + it('uses explicit maxL2BlockGas when within rollupManaLimit', () => { + const result = computeBlockLimits(makeConfig({ maxL2BlockGas: 500_000 }), 1_000_000, 12, log); + expect(result.maxL2BlockGas).toBe(500_000); + }); + + it('caps explicit maxL2BlockGas at rollupManaLimit', () => { + const result = computeBlockLimits(makeConfig({ maxL2BlockGas: 2_000_000 }), 1_000_000, 12, log); + 
expect(result.maxL2BlockGas).toBe(1_000_000); + }); + }); + + describe('DA gas', () => { + const daLimit = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; + + it('derives maxDABlockGas from DA checkpoint limit when not explicitly set', () => { + // Single block mode (maxNumberOfBlocks=1), default multiplier=2: + // min(daLimit, ceil(daLimit / 1 * 2)) = min(daLimit, daLimit * 2) = daLimit + const result = computeBlockLimits(makeConfig(), 1_000_000, 12, log); + expect(result.maxDABlockGas).toBe(daLimit); + }); + + it('uses explicit maxDABlockGas when within DA checkpoint limit', () => { + const explicit = Math.floor(daLimit / 2); + const result = computeBlockLimits(makeConfig({ maxDABlockGas: explicit }), 1_000_000, 12, log); + expect(result.maxDABlockGas).toBe(explicit); + }); + + it('caps explicit maxDABlockGas at DA checkpoint limit', () => { + const result = computeBlockLimits(makeConfig({ maxDABlockGas: daLimit + 100_000 }), 1_000_000, 12, log); + expect(result.maxDABlockGas).toBe(daLimit); + }); + }); + + describe('TX count', () => { + it('uses explicit maxTxsPerBlock when set', () => { + const result = computeBlockLimits(makeConfig({ maxTxsPerBlock: 10 }), 1_000_000, 12, log); + expect(result.maxTxsPerBlock).toBe(10); + }); + + it('caps maxTxsPerBlock at maxTxsPerCheckpoint', () => { + const result = computeBlockLimits( + makeConfig({ maxTxsPerBlock: 50, maxTxsPerCheckpoint: 30 }), + 1_000_000, + 12, + log, + ); + expect(result.maxTxsPerBlock).toBe(30); + }); + + it('derives maxTxsPerBlock from maxTxsPerCheckpoint when per-block not set', () => { + // Multi-block mode with maxNumberOfBlocks=5, multiplier=2: + // min(100, ceil(100 / 5 * 2)) = min(100, 40) = 40 + const config = makeConfig({ + maxTxsPerCheckpoint: 100, + blockDurationMs: 8000, + }); + const result = computeBlockLimits(config, 1_000_000, 12, log); + expect(result.maxTxsPerBlock).toBe(40); + }); + }); + + describe('multi-block mode', () => { + it('distributes budget across blocks in multi-block mode', () => 
{ + // With blockDurationMs=8000, aztecSlotDuration=72, ethereumSlotDuration=12, + // attestationPropagationTime=3, l1PublishingTime=12: + // checkpointFinalizationTime = 1 + 3*2 + 12 = 19 + // timeReservedAtEnd = 8 + 19 = 27 + // timeAvailableForBlocks = 72 - 1 - 27 = 44 + // maxNumberOfBlocks = floor(44 / 8) = 5 + // With multiplier=2 and rollupManaLimit=1_000_000: + // maxL2BlockGas = min(1_000_000, ceil(1_000_000 / 5 * 2)) = min(1_000_000, 400_000) = 400_000 + const config = makeConfig({ blockDurationMs: 8000 }); + const result = computeBlockLimits(config, 1_000_000, 12, log); + expect(result.maxL2BlockGas).toBe(400_000); + + const daLimit = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; + expect(result.maxDABlockGas).toBe(Math.min(daLimit, Math.ceil((daLimit / 5) * 2))); + }); + }); +}); diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index 015401c0377c..613c5d172219 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -160,9 +160,12 @@ export class SequencerClient { const l1PublishingTimeBasedOnChain = isAnvilTestChain(config.l1ChainId) ? 1 : ethereumSlotDuration; const l1PublishingTime = config.l1PublishingTime ?? l1PublishingTimeBasedOnChain; - // Combine user-defined block-level limits with checkpoint-level limits (from L1/constants/config) - // to derive the final per-block gas budgets fed into the sequencer. 
- const { maxL2BlockGas, maxDABlockGas } = this.computeBlockGasLimits(config, rollupManaLimit, l1PublishingTime, log); + const { maxL2BlockGas, maxDABlockGas, maxTxsPerBlock } = computeBlockLimits( + config, + rollupManaLimit, + l1PublishingTime, + log, + ); const l1Constants = { l1GenesisTime, slotDuration: Number(slotDuration), ethereumSlotDuration, rollupManaLimit }; @@ -180,7 +183,7 @@ export class SequencerClient { deps.dateProvider, epochCache, rollupContract, - { ...config, l1PublishingTime, maxL2BlockGas, maxDABlockGas }, + { ...config, l1PublishingTime, maxL2BlockGas, maxDABlockGas, maxTxsPerBlock }, telemetryClient, log, ); @@ -242,69 +245,91 @@ export class SequencerClient { get maxL2BlockGas(): number | undefined { return this.sequencer.maxL2BlockGas; } +} - /** - * Computes per-block L2 and DA gas budgets based on the L1 rollup limits and the timetable. - * If the user explicitly set a limit, it is capped at the corresponding checkpoint limit. - * Otherwise, derives it as (checkpointLimit / maxBlocks) * multiplier, capped at the checkpoint limit. - */ - private static computeBlockGasLimits( - config: SequencerClientConfig, - rollupManaLimit: number, - l1PublishingTime: number, - log: ReturnType, - ): { maxL2BlockGas: number; maxDABlockGas: number } { - const maxNumberOfBlocks = new SequencerTimetable({ - ethereumSlotDuration: config.ethereumSlotDuration, - aztecSlotDuration: config.aztecSlotDuration, - l1PublishingTime, - p2pPropagationTime: config.attestationPropagationTime, - blockDurationMs: config.blockDurationMs, - enforce: config.enforceTimeTable ?? DefaultSequencerConfig.enforceTimeTable, - }).maxNumberOfBlocks; +/** + * Computes per-block L2 gas, DA gas, and TX count budgets based on the L1 rollup limits and the timetable. + * If the user explicitly set a limit, it is capped at the corresponding checkpoint limit. + * Otherwise, derives it as (checkpointLimit / maxBlocks) * multiplier, capped at the checkpoint limit. 
+ */ +export function computeBlockLimits( + config: SequencerClientConfig, + rollupManaLimit: number, + l1PublishingTime: number, + log: ReturnType, +): { maxL2BlockGas: number; maxDABlockGas: number; maxTxsPerBlock: number } { + const maxNumberOfBlocks = new SequencerTimetable({ + ethereumSlotDuration: config.ethereumSlotDuration, + aztecSlotDuration: config.aztecSlotDuration, + l1PublishingTime, + p2pPropagationTime: config.attestationPropagationTime, + blockDurationMs: config.blockDurationMs, + enforce: config.enforceTimeTable ?? DefaultSequencerConfig.enforceTimeTable, + }).maxNumberOfBlocks; - const multiplier = config.gasPerBlockAllocationMultiplier ?? DefaultSequencerConfig.gasPerBlockAllocationMultiplier; + const multiplier = config.gasPerBlockAllocationMultiplier ?? DefaultSequencerConfig.gasPerBlockAllocationMultiplier; - // Compute maxL2BlockGas - let maxL2BlockGas: number; - if (config.maxL2BlockGas !== undefined) { - if (config.maxL2BlockGas > rollupManaLimit) { - log.warn( - `Provided MAX_L2_BLOCK_GAS ${config.maxL2BlockGas} exceeds L1 rollup mana limit ${rollupManaLimit} (capping)`, - ); - maxL2BlockGas = rollupManaLimit; - } else { - maxL2BlockGas = config.maxL2BlockGas; - } + // Compute maxL2BlockGas + let maxL2BlockGas: number; + if (config.maxL2BlockGas !== undefined) { + if (config.maxL2BlockGas > rollupManaLimit) { + log.warn( + `Provided MAX_L2_BLOCK_GAS ${config.maxL2BlockGas} exceeds L1 rollup mana limit ${rollupManaLimit} (capping)`, + ); + maxL2BlockGas = rollupManaLimit; } else { - maxL2BlockGas = Math.min(rollupManaLimit, Math.ceil((rollupManaLimit / maxNumberOfBlocks) * multiplier)); + maxL2BlockGas = config.maxL2BlockGas; } + } else { + maxL2BlockGas = Math.min(rollupManaLimit, Math.ceil((rollupManaLimit / maxNumberOfBlocks) * multiplier)); + } - // Compute maxDABlockGas - const daCheckpointLimit = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; - let maxDABlockGas: number; - if (config.maxDABlockGas !== undefined) { - if (config.maxDABlockGas 
> daCheckpointLimit) { - log.warn( - `Provided MAX_DA_BLOCK_GAS ${config.maxDABlockGas} exceeds DA checkpoint limit ${daCheckpointLimit} (capping)`, - ); - maxDABlockGas = daCheckpointLimit; - } else { - maxDABlockGas = config.maxDABlockGas; - } + // Compute maxDABlockGas + const daCheckpointLimit = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; + let maxDABlockGas: number; + if (config.maxDABlockGas !== undefined) { + if (config.maxDABlockGas > daCheckpointLimit) { + log.warn( + `Provided MAX_DA_BLOCK_GAS ${config.maxDABlockGas} exceeds DA checkpoint limit ${daCheckpointLimit} (capping)`, + ); + maxDABlockGas = daCheckpointLimit; } else { - maxDABlockGas = Math.min(daCheckpointLimit, Math.ceil((daCheckpointLimit / maxNumberOfBlocks) * multiplier)); + maxDABlockGas = config.maxDABlockGas; } + } else { + maxDABlockGas = Math.min(daCheckpointLimit, Math.ceil((daCheckpointLimit / maxNumberOfBlocks) * multiplier)); + } - log.info(`Computed block gas limits L2=${maxL2BlockGas} DA=${maxDABlockGas}`, { - maxL2BlockGas, - maxDABlockGas, - rollupManaLimit, - daCheckpointLimit, - maxNumberOfBlocks, - multiplier, - }); - - return { maxL2BlockGas, maxDABlockGas }; + // Compute maxTxsPerBlock + const defaultMaxTxsPerBlock = 32; + let maxTxsPerBlock: number; + if (config.maxTxsPerBlock !== undefined) { + if (config.maxTxsPerCheckpoint !== undefined && config.maxTxsPerBlock > config.maxTxsPerCheckpoint) { + log.warn( + `Provided MAX_TX_PER_BLOCK ${config.maxTxsPerBlock} exceeds MAX_TX_PER_CHECKPOINT ${config.maxTxsPerCheckpoint} (capping)`, + ); + maxTxsPerBlock = config.maxTxsPerCheckpoint; + } else { + maxTxsPerBlock = config.maxTxsPerBlock; + } + } else if (config.maxTxsPerCheckpoint !== undefined) { + maxTxsPerBlock = Math.min( + config.maxTxsPerCheckpoint, + Math.ceil((config.maxTxsPerCheckpoint / maxNumberOfBlocks) * multiplier), + ); + } else { + maxTxsPerBlock = defaultMaxTxsPerBlock; } + + log.info(`Computed block limits L2=${maxL2BlockGas} DA=${maxDABlockGas} 
maxTxs=${maxTxsPerBlock}`, { + maxL2BlockGas, + maxDABlockGas, + maxTxsPerBlock, + rollupManaLimit, + daCheckpointLimit, + maxNumberOfBlocks, + multiplier, + }); + + return { maxL2BlockGas, maxDABlockGas, maxTxsPerBlock }; } diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index e92a33ba4a5c..382069e28740 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -13,7 +13,6 @@ import { type P2PConfig, p2pConfigMappings } from '@aztec/p2p/config'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { type ChainConfig, - DEFAULT_MAX_TXS_PER_BLOCK, type SequencerConfig, chainConfigMappings, sharedSequencerConfigMappings, @@ -38,7 +37,6 @@ export type { SequencerConfig }; */ export const DefaultSequencerConfig = { sequencerPollingIntervalMS: 500, - maxTxsPerBlock: DEFAULT_MAX_TXS_PER_BLOCK, minTxsPerBlock: 1, buildCheckpointIfEmpty: false, publishTxsWithProposals: false, @@ -78,6 +76,11 @@ export const sequencerConfigMappings: ConfigMappingsType = { description: 'The number of ms to wait between polling for checking to build on the next slot.', ...numberConfigHelper(DefaultSequencerConfig.sequencerPollingIntervalMS), }, + maxTxsPerCheckpoint: { + env: 'SEQ_MAX_TX_PER_CHECKPOINT', + description: 'The maximum number of txs across all blocks in a checkpoint.', + parseEnv: (val: string) => (val ? 
parseInt(val, 10) : undefined), + }, minTxsPerBlock: { env: 'SEQ_MIN_TX_PER_BLOCK', description: 'The minimum number of txs to include in a block.', diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts index 0c6f750bb8bb..3e9cd16150c8 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts @@ -271,6 +271,8 @@ export class CheckpointProposalJob implements Traceable { rollupManaLimit: this.l1Constants.rollupManaLimit, maxL2BlockGas: this.config.maxL2BlockGas, maxDABlockGas: this.config.maxDABlockGas, + maxTxsPerBlock: this.config.maxTxsPerBlock, + maxTxsPerCheckpoint: this.config.maxTxsPerCheckpoint, }); } catch (err) { this.log.error(`Built an invalid checkpoint at slot ${this.slot} (skipping proposal)`, err, { @@ -565,7 +567,7 @@ export class CheckpointProposalJob implements Traceable { ); this.setStateFn(SequencerState.CREATING_BLOCK, this.slot); - // Per-block limits derived at startup by SequencerClient.computeBlockGasLimits(), further capped + // Per-block limits derived at startup by computeBlockLimits(), further capped // by remaining checkpoint-level budgets inside CheckpointBuilder before each block is built. 
const blockBuilderOptions: PublicProcessorLimits = { maxTransactions: this.config.maxTxsPerBlock, diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 4f8e011c1c82..464340b385ff 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -873,7 +873,7 @@ describe('sequencer', () => { sequencer.updateConfig({ enforceTimeTable: true, maxTxsPerBlock: 4, blockDurationMs: 500 }); const txs = await timesParallel(8, i => makeTx(i * 0x10000)); - block = await makeBlock(txs); + block = await makeBlock(txs.slice(0, 4)); TestUtils.mockPendingTxs(p2p, txs); await sequencer.work(); diff --git a/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts b/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts index 9baf133dc1fc..42d691191ef8 100644 --- a/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts +++ b/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts @@ -1,5 +1,6 @@ -import { type BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; +import { type BlockNumber, CheckpointNumber, IndexWithinCheckpoint } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; +import { unfreeze } from '@aztec/foundation/types'; import { L2Block } from '@aztec/stdlib/block'; import { Checkpoint } from '@aztec/stdlib/checkpoint'; import type { @@ -85,8 +86,10 @@ export class MockCheckpointBuilder implements ICheckpointBlockBuilder { let usedTxs: Tx[]; if (this.blockProvider) { - // Dynamic mode: get block from provider - block = this.blockProvider(); + // Dynamic mode: get block from provider, cloning to avoid shared references across multiple buildBlock calls + block = L2Block.fromBuffer(this.blockProvider().toBuffer()); + block.header.globalVariables.blockNumber = blockNumber; + await 
block.header.recomputeHash(); usedTxs = []; this.builtBlocks.push(block); } else { @@ -122,69 +125,69 @@ export class MockCheckpointBuilder implements ICheckpointBlockBuilder { completeCheckpoint(): Promise { this.completeCheckpointCalled = true; const allBlocks = this.blockProvider ? this.builtBlocks : this.blocks; - const lastBlock = allBlocks[allBlocks.length - 1]; - // Create a CheckpointHeader from the last block's header for testing - const checkpointHeader = this.createCheckpointHeader(lastBlock); - return Promise.resolve( - new Checkpoint( - makeAppendOnlyTreeSnapshot(lastBlock.header.globalVariables.blockNumber + 1), - checkpointHeader, - allBlocks, - this.checkpointNumber, - ), - ); + return this.buildCheckpoint(allBlocks); } getCheckpoint(): Promise { this.getCheckpointCalled = true; const builtBlocks = this.blockProvider ? this.builtBlocks : this.blocks.slice(0, this.blockIndex); - const lastBlock = builtBlocks[builtBlocks.length - 1]; - if (!lastBlock) { + if (builtBlocks.length === 0) { throw new Error('No blocks built yet'); } - // Create a CheckpointHeader from the last block's header for testing - const checkpointHeader = this.createCheckpointHeader(lastBlock); - return Promise.resolve( - new Checkpoint( - makeAppendOnlyTreeSnapshot(lastBlock.header.globalVariables.blockNumber + 1), - checkpointHeader, - builtBlocks, - this.checkpointNumber, - ), - ); + return this.buildCheckpoint(builtBlocks); } - /** - * Creates a CheckpointHeader from a block's header for testing. - * This is a simplified version that creates a minimal CheckpointHeader. - */ - private createCheckpointHeader(block: L2Block): CheckpointHeader { - const header = block.header; - const gv = header.globalVariables; - return CheckpointHeader.empty({ - lastArchiveRoot: header.lastArchive.root, - blockHeadersHash: Fr.random(), // Use random for testing + /** Builds a structurally valid Checkpoint from a list of blocks, fixing up indexes and archive chaining. 
*/ + private async buildCheckpoint(blocks: L2Block[]): Promise { + // Fix up indexWithinCheckpoint and archive chaining so the checkpoint passes structural validation. + for (let i = 0; i < blocks.length; i++) { + blocks[i].indexWithinCheckpoint = IndexWithinCheckpoint(i); + if (i > 0) { + unfreeze(blocks[i].header).lastArchive = blocks[i - 1].archive; + await blocks[i].header.recomputeHash(); + } + } + + const firstBlock = blocks[0]; + const lastBlock = blocks[blocks.length - 1]; + const gv = firstBlock.header.globalVariables; + + const checkpointHeader = CheckpointHeader.empty({ + lastArchiveRoot: firstBlock.header.lastArchive.root, + blockHeadersHash: Fr.random(), slotNumber: gv.slotNumber, timestamp: gv.timestamp, coinbase: gv.coinbase, feeRecipient: gv.feeRecipient, gasFees: gv.gasFees, - totalManaUsed: header.totalManaUsed, + totalManaUsed: lastBlock.header.totalManaUsed, }); + + return new Checkpoint( + makeAppendOnlyTreeSnapshot(lastBlock.header.globalVariables.blockNumber + 1), + checkpointHeader, + blocks, + this.checkpointNumber, + ); } - /** Reset for reuse in another test */ - reset(): void { - this.blocks = []; + /** Resets per-checkpoint state (built blocks, consumed txs) while preserving config (blockProvider, seeded blocks). 
*/ + resetCheckpointState(): void { this.builtBlocks = []; - this.usedTxsPerBlock = []; this.blockIndex = 0; - this.buildBlockCalls = []; this.consumedTxHashes.clear(); this.completeCheckpointCalled = false; this.getCheckpointCalled = false; + } + + /** Reset for reuse in another test */ + reset(): void { + this.blocks = []; + this.usedTxsPerBlock = []; + this.buildBlockCalls = []; this.errorOnBuild = undefined; this.blockProvider = undefined; + this.resetCheckpointState(); } } @@ -273,6 +276,8 @@ export class MockCheckpointsBuilder implements ICheckpointsBuilder { if (!this.checkpointBuilder) { // Auto-create a builder if none was set this.checkpointBuilder = new MockCheckpointBuilder(constants, checkpointNumber); + } else { + this.checkpointBuilder.resetCheckpointState(); } return Promise.resolve(this.checkpointBuilder); @@ -299,6 +304,8 @@ export class MockCheckpointsBuilder implements ICheckpointsBuilder { if (!this.checkpointBuilder) { // Auto-create a builder if none was set this.checkpointBuilder = new MockCheckpointBuilder(constants, checkpointNumber); + } else { + this.checkpointBuilder.resetCheckpointState(); } return Promise.resolve(this.checkpointBuilder); diff --git a/yarn-project/stdlib/src/block/l2_block.ts b/yarn-project/stdlib/src/block/l2_block.ts index 362a36f996a5..e7c78f332a1d 100644 --- a/yarn-project/stdlib/src/block/l2_block.ts +++ b/yarn-project/stdlib/src/block/l2_block.ts @@ -176,7 +176,7 @@ export class L2Block { } & Partial[0]> = {}, ): Promise { const archive = new AppendOnlyTreeSnapshot(Fr.random(), blockNumber + 1); - const header = BlockHeader.random({ blockNumber, ...blockHeaderOverrides }); + const header = BlockHeader.random({ ...blockHeaderOverrides, blockNumber }); const body = await Body.random({ txsPerBlock, makeTxOptions, ...txOptions }); return new L2Block(archive, header, body, checkpointNumber, indexWithinCheckpoint); } diff --git a/yarn-project/stdlib/src/checkpoint/checkpoint.ts 
b/yarn-project/stdlib/src/checkpoint/checkpoint.ts index 2c95d3c0be4a..6f1159533cd1 100644 --- a/yarn-project/stdlib/src/checkpoint/checkpoint.ts +++ b/yarn-project/stdlib/src/checkpoint/checkpoint.ts @@ -6,7 +6,7 @@ import { IndexWithinCheckpoint, SlotNumber, } from '@aztec/foundation/branded-types'; -import { sum } from '@aztec/foundation/collection'; +import { pick, sum } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { BufferReader, serializeSignedBigInt, serializeToBuffer } from '@aztec/foundation/serialize'; import type { FieldsOf } from '@aztec/foundation/types'; @@ -152,10 +152,12 @@ export class Checkpoint { startBlockNumber?: number; previousArchive?: AppendOnlyTreeSnapshot; feeAssetPriceModifier?: bigint; + archive?: AppendOnlyTreeSnapshot; } & Partial[0]> & Partial[1]> = {}, ) { - const header = CheckpointHeader.random(options); + const headerOptions = previousArchive ? { lastArchiveRoot: previousArchive.root, ...options } : options; + const header = CheckpointHeader.random(headerOptions); // Create blocks sequentially to chain archive roots properly. // Each block's header.lastArchive must equal the previous block's archive. @@ -166,11 +168,18 @@ export class Checkpoint { indexWithinCheckpoint: IndexWithinCheckpoint(i), ...options, ...(lastArchive ? { lastArchive } : {}), + ...pick(header, 'slotNumber', 'timestamp', 'coinbase', 'feeRecipient', 'gasFees'), }); lastArchive = block.archive; blocks.push(block); } - return new Checkpoint(AppendOnlyTreeSnapshot.random(), header, blocks, checkpointNumber, feeAssetPriceModifier); + return new Checkpoint( + options.archive ?? 
AppendOnlyTreeSnapshot.random(), + header, + blocks, + checkpointNumber, + feeAssetPriceModifier, + ); } } diff --git a/yarn-project/stdlib/src/checkpoint/validate.test.ts b/yarn-project/stdlib/src/checkpoint/validate.test.ts new file mode 100644 index 000000000000..6dfa314dd0c3 --- /dev/null +++ b/yarn-project/stdlib/src/checkpoint/validate.test.ts @@ -0,0 +1,233 @@ +import { BLOBS_PER_CHECKPOINT, FIELDS_PER_BLOB, MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT } from '@aztec/constants'; +import { BlockNumber, CheckpointNumber, IndexWithinCheckpoint, SlotNumber } from '@aztec/foundation/branded-types'; +import { Fr } from '@aztec/foundation/curves/bn254'; +import { EthAddress } from '@aztec/foundation/eth-address'; + +import { jest } from '@jest/globals'; + +import { AztecAddress } from '../aztec-address/index.js'; +import { GasFees } from '../gas/index.js'; +import { AppendOnlyTreeSnapshot } from '../trees/append_only_tree_snapshot.js'; +import { BlockHeader } from '../tx/block_header.js'; +import { Checkpoint } from './checkpoint.js'; +import { CheckpointValidationError, validateCheckpoint, validateCheckpointStructure } from './validate.js'; + +describe('validateCheckpointStructure', () => { + const checkpointNumber = CheckpointNumber(1); + + const fixedSlot = SlotNumber(42); + const fixedCoinbase = EthAddress.random(); + const fixedFeeRecipient = AztecAddress.fromField(Fr.random()); + const fixedGasFees = GasFees.random(); + const fixedTimestamp = BigInt(Math.floor(Date.now() / 1000)); + + /** Builds a valid random checkpoint with the given number of blocks. All blocks share the same slot, + * coinbase, feeRecipient, gasFees, and timestamp, and the checkpoint header's lastArchiveRoot is + * aligned with the first block. 
*/ + async function makeValidCheckpoint(numBlocks = 2): Promise { + const checkpoint = await Checkpoint.random(checkpointNumber, { + numBlocks, + startBlockNumber: 1, + slotNumber: fixedSlot, + coinbase: fixedCoinbase, + feeRecipient: fixedFeeRecipient, + gasFees: fixedGasFees, + timestamp: fixedTimestamp, + }); + // Align checkpoint header's lastArchiveRoot with the first block. + checkpoint.header.lastArchiveRoot = checkpoint.blocks[0].header.lastArchive.root; + return checkpoint; + } + + it('passes on a valid single-block checkpoint', async () => { + const checkpoint = await makeValidCheckpoint(1); + expect(() => validateCheckpointStructure(checkpoint)).not.toThrow(); + }); + + it('passes on a valid multi-block checkpoint', async () => { + const checkpoint = await makeValidCheckpoint(3); + expect(() => validateCheckpointStructure(checkpoint)).not.toThrow(); + }); + + it('throws when checkpoint slot does not match first block slot', async () => { + const checkpoint = await makeValidCheckpoint(1); + checkpoint.header.slotNumber = SlotNumber(checkpoint.blocks[0].slot + 1); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/all blocks must share the same slot/); + }); + + it('throws when checkpoint lastArchiveRoot does not match first block lastArchive root', async () => { + const checkpoint = await makeValidCheckpoint(1); + checkpoint.header.lastArchiveRoot = AppendOnlyTreeSnapshot.random().root; + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/lastArchiveRoot does not match first block/); + }); + + it('throws on empty block list', async () => { + const checkpoint = await makeValidCheckpoint(1); + checkpoint.blocks = []; + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => 
validateCheckpointStructure(checkpoint)).toThrow('Checkpoint has no blocks'); + }); + + it('throws when block count exceeds MAX_BLOCKS_PER_CHECKPOINT', async () => { + // Build 73 blocks (MAX_BLOCKS_PER_CHECKPOINT = 72) + const checkpoint = await makeValidCheckpoint(1); + // Reuse the single block to fill up 73 slots (structure checks happen before archive chaining in loop) + const block = checkpoint.blocks[0]; + checkpoint.blocks = Array.from({ length: 73 }, (_, i) => { + const cloned = Object.create(Object.getPrototypeOf(block), Object.getOwnPropertyDescriptors(block)); + cloned.indexWithinCheckpoint = IndexWithinCheckpoint(i); + return cloned; + }); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/exceeding limit of 72/); + }); + + it('throws when indexWithinCheckpoint is wrong', async () => { + const checkpoint = await makeValidCheckpoint(2); + // Swap the indices + const block0 = checkpoint.blocks[0]; + block0.indexWithinCheckpoint = IndexWithinCheckpoint(1); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/indexWithinCheckpoint/); + }); + + it('throws when block numbers are not sequential', async () => { + const checkpoint = await makeValidCheckpoint(2); + // Manually set block[1] to a non-sequential number (block[0].number + 2) + const block1 = checkpoint.blocks[1]; + // Override block number via header globalVariables + const gv = block1.header.globalVariables; + gv.blockNumber = BlockNumber(gv.blockNumber + 2); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/not sequential/); + }); + + it('throws when archive roots are not chained', async () => { + const checkpoint = await makeValidCheckpoint(2); + // Break chaining: replace block[1]'s 
header with a new one that has a random lastArchive + const block1 = checkpoint.blocks[1]; + block1.header = BlockHeader.from({ ...block1.header, lastArchive: AppendOnlyTreeSnapshot.random() }); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/lastArchive root does not match/); + }); + + it('throws when blocks have different slot numbers', async () => { + const checkpoint = await makeValidCheckpoint(2); + // Change block[1]'s slot to something different + const block1 = checkpoint.blocks[1]; + block1.header.globalVariables.slotNumber = SlotNumber(block1.slot + 1); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/all blocks must share the same slot/); + }); + + it('throws when a block global variables do not match checkpoint header', async () => { + const checkpoint = await makeValidCheckpoint(2); + // Mutate coinbase on block[1] to something different from the checkpoint header + checkpoint.blocks[1].header.globalVariables.coinbase = EthAddress.random(); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/global variables.*do not match checkpoint header/); + }); +}); + +describe('validateCheckpoint — limits', () => { + const checkpointNumber = CheckpointNumber(1); + const fixedSlot = SlotNumber(42); + const fixedCoinbase = EthAddress.random(); + const fixedFeeRecipient = AztecAddress.fromField(Fr.random()); + const fixedGasFees = GasFees.random(); + const fixedTimestamp = BigInt(Math.floor(Date.now() / 1000)); + + /** A known mana value injected into every block, making assertions deterministic. */ + const specificMana = 1_000_000; + + /** Opts that leave all limits wide open so structural validity is tested in isolation. 
*/ + const validOpts = { + rollupManaLimit: Number.MAX_SAFE_INTEGER, + maxL2BlockGas: undefined as number | undefined, + maxDABlockGas: undefined as number | undefined, + }; + + /** Builds a structurally valid single-block checkpoint with a known mana value. */ + async function makeCheckpoint(): Promise { + const checkpoint = await Checkpoint.random(checkpointNumber, { + numBlocks: 1, + startBlockNumber: 1, + slotNumber: fixedSlot, + coinbase: fixedCoinbase, + feeRecipient: fixedFeeRecipient, + gasFees: fixedGasFees, + timestamp: fixedTimestamp, + totalManaUsed: new Fr(specificMana), + }); + checkpoint.header.lastArchiveRoot = checkpoint.blocks[0].header.lastArchive.root; + return checkpoint; + } + + it('passes when all limits are within bounds', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, validOpts)).not.toThrow(); + }); + + it('throws when checkpoint mana exceeds rollupManaLimit', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, rollupManaLimit: specificMana - 1 })).toThrow( + CheckpointValidationError, + ); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, rollupManaLimit: specificMana - 1 })).toThrow( + /mana cost.*exceeds rollup limit/, + ); + }); + + it('passes when checkpoint mana equals rollupManaLimit', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, rollupManaLimit: specificMana })).not.toThrow(); + }); + + it('throws when checkpoint DA gas exceeds MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT', async () => { + const checkpoint = await makeCheckpoint(); + jest.spyOn(checkpoint.blocks[0], 'computeDAGasUsed').mockReturnValue(MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT + 1); + expect(() => validateCheckpoint(checkpoint, validOpts)).toThrow(CheckpointValidationError); + expect(() => validateCheckpoint(checkpoint, validOpts)).toThrow(/DA gas cost.*exceeds 
limit/); + }); + + it('throws when checkpoint blob field count exceeds limit', async () => { + const checkpoint = await makeCheckpoint(); + const maxBlobFields = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB; + jest.spyOn(checkpoint, 'toBlobFields').mockReturnValue(new Array(maxBlobFields + 1).fill(Fr.ZERO)); + expect(() => validateCheckpoint(checkpoint, validOpts)).toThrow(CheckpointValidationError); + expect(() => validateCheckpoint(checkpoint, validOpts)).toThrow(/blob field count.*exceeds limit/); + }); + + it('throws when a block L2 gas exceeds maxL2BlockGas', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxL2BlockGas: specificMana - 1 })).toThrow( + CheckpointValidationError, + ); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxL2BlockGas: specificMana - 1 })).toThrow( + /L2 gas used.*exceeding limit/, + ); + }); + + it('skips per-block L2 gas check when maxL2BlockGas is undefined', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxL2BlockGas: undefined })).not.toThrow(); + }); + + it('throws when a block DA gas exceeds maxDABlockGas', async () => { + const checkpoint = await makeCheckpoint(); + jest.spyOn(checkpoint.blocks[0], 'computeDAGasUsed').mockReturnValue(1000); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxDABlockGas: 999 })).toThrow( + CheckpointValidationError, + ); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxDABlockGas: 999 })).toThrow( + /DA gas used.*exceeding limit/, + ); + }); + + it('skips per-block DA gas check when maxDABlockGas is undefined', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxDABlockGas: undefined })).not.toThrow(); + }); +}); diff --git a/yarn-project/stdlib/src/checkpoint/validate.ts b/yarn-project/stdlib/src/checkpoint/validate.ts index 
a89d9409f189..1ceb9fa4c102 100644 --- a/yarn-project/stdlib/src/checkpoint/validate.ts +++ b/yarn-project/stdlib/src/checkpoint/validate.ts @@ -2,6 +2,7 @@ import { BLOBS_PER_CHECKPOINT, FIELDS_PER_BLOB, MAX_PROCESSABLE_DA_GAS_PER_CHECK import type { CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { sum } from '@aztec/foundation/collection'; +import { MAX_BLOCKS_PER_CHECKPOINT } from '../deserialization/index.js'; import type { Checkpoint } from './checkpoint.js'; export class CheckpointValidationError extends Error { @@ -17,6 +18,7 @@ export class CheckpointValidationError extends Error { /** * Validates a checkpoint. Throws a CheckpointValidationError if any validation fails. + * - Validates structural integrity (non-empty, block count, sequential numbers, archive chaining, slot consistency) * - Validates checkpoint blob field count against maxBlobFields limit * - Validates total L2 gas used by checkpoint blocks against the Rollup contract mana limit * - Validates total DA gas used by checkpoint blocks against MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT @@ -25,24 +27,113 @@ export class CheckpointValidationError extends Error { export function validateCheckpoint( checkpoint: Checkpoint, opts: { - rollupManaLimit: number; - maxL2BlockGas: number | undefined; - maxDABlockGas: number | undefined; + rollupManaLimit?: number; + maxL2BlockGas?: number; + maxDABlockGas?: number; + maxTxsPerCheckpoint?: number; + maxTxsPerBlock?: number; }, ): void { + validateCheckpointStructure(checkpoint); validateCheckpointLimits(checkpoint, opts); validateCheckpointBlocksGasLimits(checkpoint, opts); } +/** + * Validates structural integrity of a checkpoint. 
+ * - Non-empty block list + * - Block count within MAX_BLOCKS_PER_CHECKPOINT + * - Checkpoint slot matches the first block's slot + * - Checkpoint lastArchiveRoot matches the first block's lastArchive root + * - Sequential block numbers without gaps + * - Sequential indexWithinCheckpoint starting at 0 + * - Archive root chaining between consecutive blocks + * - Consistent slot number across all blocks + * - Global variables (slot, timestamp, coinbase, feeRecipient, gasFees) match checkpoint header for each block + */ +export function validateCheckpointStructure(checkpoint: Checkpoint): void { + const { blocks, number, slot } = checkpoint; + + if (blocks.length === 0) { + throw new CheckpointValidationError('Checkpoint has no blocks', number, slot); + } + + if (blocks.length > MAX_BLOCKS_PER_CHECKPOINT) { + throw new CheckpointValidationError( + `Checkpoint has ${blocks.length} blocks, exceeding limit of ${MAX_BLOCKS_PER_CHECKPOINT}`, + number, + slot, + ); + } + + const firstBlock = blocks[0]; + + if (!checkpoint.header.lastArchiveRoot.equals(firstBlock.header.lastArchive.root)) { + throw new CheckpointValidationError( + `Checkpoint lastArchiveRoot does not match first block's lastArchive root`, + number, + slot, + ); + } + + for (let i = 0; i < blocks.length; i++) { + const block = blocks[i]; + + if (block.indexWithinCheckpoint !== i) { + throw new CheckpointValidationError( + `Block at index ${i} has indexWithinCheckpoint ${block.indexWithinCheckpoint}, expected ${i}`, + number, + slot, + ); + } + + if (block.slot !== slot) { + throw new CheckpointValidationError( + `Block ${block.number} has slot ${block.slot}, expected ${slot} (all blocks must share the same slot)`, + number, + slot, + ); + } + + if (!checkpoint.header.matchesGlobalVariables(block.header.globalVariables)) { + throw new CheckpointValidationError( + `Block ${block.number} global variables (slot, timestamp, coinbase, feeRecipient, gasFees) do not match checkpoint header`, + number, + slot, + ); + 
} + + if (i > 0) { + const prev = blocks[i - 1]; + if (block.number !== prev.number + 1) { + throw new CheckpointValidationError( + `Block numbers are not sequential: block at index ${i - 1} has number ${prev.number}, block at index ${i} has number ${block.number}`, + number, + slot, + ); + } + + if (!block.header.lastArchive.root.equals(prev.archive.root)) { + throw new CheckpointValidationError( + `Block ${block.number} lastArchive root does not match archive root of block ${prev.number}`, + number, + slot, + ); + } + } + } +} + /** Validates checkpoint blocks gas limits */ function validateCheckpointBlocksGasLimits( checkpoint: Checkpoint, opts: { - maxL2BlockGas: number | undefined; - maxDABlockGas: number | undefined; + maxL2BlockGas?: number; + maxDABlockGas?: number; + maxTxsPerBlock?: number; }, ): void { - const { maxL2BlockGas, maxDABlockGas } = opts; + const { maxL2BlockGas, maxDABlockGas, maxTxsPerBlock } = opts; if (maxL2BlockGas !== undefined) { for (const block of checkpoint.blocks) { @@ -69,43 +160,68 @@ function validateCheckpointBlocksGasLimits( } } } + + if (maxTxsPerBlock !== undefined) { + for (const block of checkpoint.blocks) { + const blockTxCount = block.body.txEffects.length; + if (blockTxCount > maxTxsPerBlock) { + throw new CheckpointValidationError( + `Block ${block.number} in checkpoint has ${blockTxCount} txs exceeding limit of ${maxTxsPerBlock}`, + checkpoint.number, + checkpoint.slot, + ); + } + } + } } -/** Validates checkpoint max blob fields and gas limits */ +/** Validates checkpoint max blob fields, gas limits, and tx limits */ function validateCheckpointLimits( checkpoint: Checkpoint, opts: { - rollupManaLimit: number; + rollupManaLimit?: number; + maxTxsPerCheckpoint?: number; }, ): void { - const { rollupManaLimit } = opts; + const { rollupManaLimit, maxTxsPerCheckpoint } = opts; const maxBlobFields = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB; const maxDAGas = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; - const checkpointMana = 
sum(checkpoint.blocks.map(block => block.header.totalManaUsed.toNumber())); - if (checkpointMana > rollupManaLimit) { + if (rollupManaLimit !== undefined) { + const checkpointMana = sum(checkpoint.blocks.map(block => block.header.totalManaUsed.toNumber())); + if (checkpointMana > rollupManaLimit) { + throw new CheckpointValidationError( + `Checkpoint mana cost ${checkpointMana} exceeds rollup limit of ${rollupManaLimit}`, + checkpoint.number, + checkpoint.slot, + ); + } + } + + const checkpointDAGas = sum(checkpoint.blocks.map(block => block.computeDAGasUsed())); + if (checkpointDAGas > maxDAGas) { throw new CheckpointValidationError( - `Checkpoint mana cost ${checkpointMana} exceeds rollup limit of ${rollupManaLimit}`, + `Checkpoint DA gas cost ${checkpointDAGas} exceeds limit of ${maxDAGas}`, checkpoint.number, checkpoint.slot, ); } - const checkpointDAGas = sum(checkpoint.blocks.map(block => block.computeDAGasUsed())); - if (checkpointDAGas > maxDAGas) { + const checkpointBlobFields = checkpoint.toBlobFields().length; + if (checkpointBlobFields > maxBlobFields) { throw new CheckpointValidationError( - `Checkpoint DA gas cost ${checkpointDAGas} exceeds limit of ${maxDAGas}`, + `Checkpoint blob field count ${checkpointBlobFields} exceeds limit of ${maxBlobFields}`, checkpoint.number, checkpoint.slot, ); } - if (maxBlobFields !== undefined) { - const checkpointBlobFields = checkpoint.toBlobFields().length; - if (checkpointBlobFields > maxBlobFields) { + if (maxTxsPerCheckpoint !== undefined) { + const checkpointTxCount = sum(checkpoint.blocks.map(block => block.body.txEffects.length)); + if (checkpointTxCount > maxTxsPerCheckpoint) { throw new CheckpointValidationError( - `Checkpoint blob field count ${checkpointBlobFields} exceeds limit of ${maxBlobFields}`, + `Checkpoint tx count ${checkpointTxCount} exceeds limit of ${maxTxsPerCheckpoint}`, checkpoint.number, checkpoint.slot, ); diff --git a/yarn-project/stdlib/src/config/sequencer-config.ts 
b/yarn-project/stdlib/src/config/sequencer-config.ts index 31d0eca9458a..4dd540a108f1 100644 --- a/yarn-project/stdlib/src/config/sequencer-config.ts +++ b/yarn-project/stdlib/src/config/sequencer-config.ts @@ -1,10 +1,7 @@ -import { type ConfigMappingsType, numberConfigHelper } from '@aztec/foundation/config'; +import type { ConfigMappingsType } from '@aztec/foundation/config'; import type { SequencerConfig } from '../interfaces/configs.js'; -/** Default maximum number of transactions per block. */ -export const DEFAULT_MAX_TXS_PER_BLOCK = 32; - /** * Partial sequencer config mappings for fields that need to be shared across packages. * The full sequencer config mappings remain in sequencer-client, but shared fields @@ -32,6 +29,6 @@ export const sharedSequencerConfigMappings: ConfigMappingsType< maxTxsPerBlock: { env: 'SEQ_MAX_TX_PER_BLOCK', description: 'The maximum number of txs to include in a block.', - ...numberConfigHelper(DEFAULT_MAX_TXS_PER_BLOCK), + parseEnv: (val: string) => (val ? 
parseInt(val, 10) : undefined), }, }; diff --git a/yarn-project/stdlib/src/interfaces/block-builder.ts b/yarn-project/stdlib/src/interfaces/block-builder.ts index 5674bb3c6906..056d05711378 100644 --- a/yarn-project/stdlib/src/interfaces/block-builder.ts +++ b/yarn-project/stdlib/src/interfaces/block-builder.ts @@ -61,6 +61,7 @@ export type FullNodeBlockBuilderConfig = Pick & { @@ -77,6 +78,7 @@ export const FullNodeBlockBuilderConfigKeys: (keyof FullNodeBlockBuilderConfig)[ 'fakeProcessingDelayPerTxMs', 'fakeThrowAfterProcessingTxCount', 'maxTxsPerBlock', + 'maxTxsPerCheckpoint', 'maxL2BlockGas', 'maxDABlockGas', 'rollupManaLimit', diff --git a/yarn-project/stdlib/src/interfaces/configs.ts b/yarn-project/stdlib/src/interfaces/configs.ts index 08101cbf7882..d6266cdbca93 100644 --- a/yarn-project/stdlib/src/interfaces/configs.ts +++ b/yarn-project/stdlib/src/interfaces/configs.ts @@ -13,6 +13,8 @@ export interface SequencerConfig { sequencerPollingIntervalMS?: number; /** The maximum number of txs to include in a block. */ maxTxsPerBlock?: number; + /** The maximum number of txs across all blocks in a checkpoint. */ + maxTxsPerCheckpoint?: number; /** The minimum number of txs to include in a block. */ minTxsPerBlock?: number; /** The minimum number of valid txs (after execution) to include in a block. If not set, falls back to minTxsPerBlock. 
*/ @@ -85,6 +87,7 @@ export const SequencerConfigSchema = zodFor()( z.object({ sequencerPollingIntervalMS: z.number().optional(), maxTxsPerBlock: z.number().optional(), + maxTxsPerCheckpoint: z.number().optional(), minValidTxsPerBlock: z.number().optional(), minTxsPerBlock: z.number().optional(), maxL2BlockGas: z.number().optional(), @@ -135,6 +138,8 @@ type SequencerConfigOptionalKeys = | 'txPublicSetupAllowList' | 'minValidTxsPerBlock' | 'minBlocksForCheckpoint' + | 'maxTxsPerBlock' + | 'maxTxsPerCheckpoint' | 'maxL2BlockGas' | 'maxDABlockGas' | 'gasPerBlockAllocationMultiplier'; diff --git a/yarn-project/stdlib/src/tests/mocks.ts b/yarn-project/stdlib/src/tests/mocks.ts index 79d33955c3d6..45b98431c5ca 100644 --- a/yarn-project/stdlib/src/tests/mocks.ts +++ b/yarn-project/stdlib/src/tests/mocks.ts @@ -427,10 +427,13 @@ export async function mockCheckpointAndMessages( Partial[1]> = {}, ) { const slotNumber = options.slotNumber ?? SlotNumber(Number(checkpointNumber) * 10); + const globals = GlobalVariables.random({ slotNumber, ...options }); const blocksAndMessages = []; + // Track the previous block's archive to ensure consecutive blocks have consistent archive roots. // The current block's header.lastArchive must equal the previous block's archive. let lastArchive: AppendOnlyTreeSnapshot | undefined = previousArchive; + // Pass maxEffects via txOptions so it reaches TxEffect.random const txOptions = maxEffects !== undefined ? { maxEffects } : {}; for (let i = 0; i < (blocks?.length ?? numBlocks); i++) { @@ -439,11 +442,11 @@ export async function mockCheckpointAndMessages( block: blocks?.[i] ?? (await L2Block.random(blockNumber, { + ...globals, checkpointNumber, indexWithinCheckpoint: IndexWithinCheckpoint(i), txsPerBlock: numTxsPerBlock, txOptions, - slotNumber, ...options, ...makeBlockOptions(blockNumber), ...(lastArchive ? 
{ lastArchive } : {}), @@ -457,12 +460,18 @@ export async function mockCheckpointAndMessages( const messages = blocksAndMessages[0].messages; const inHash = computeInHashFromL1ToL2Messages(messages); - const checkpoint = await Checkpoint.random(checkpointNumber, { numBlocks: 0, slotNumber, inHash, ...options }); + const firstBlockLastArchive = blocksAndMessages[0].block.header.lastArchive; + const checkpoint = await Checkpoint.random(checkpointNumber, { + numBlocks: 0, + inHash, + ...options, + ...globals, + lastArchive: firstBlockLastArchive, + lastArchiveRoot: firstBlockLastArchive.root, + archive: lastArchive, + }); + checkpoint.blocks = blocksAndMessages.map(({ block }) => block); - // Set the checkpoint's archive to match the last block's archive for proper chaining. - // When the archiver reconstructs checkpoints from L1, it uses the checkpoint's archive root - // from the L1 event to set the last block's archive. Without this, the archive chain breaks. - checkpoint.archive = lastArchive!; // Return lastArchive so callers can chain it across multiple checkpoints return { checkpoint, messages, lastArchive }; diff --git a/yarn-project/stdlib/src/tx/block_header.ts b/yarn-project/stdlib/src/tx/block_header.ts index 1a1457e0f96e..6788df00ca98 100644 --- a/yarn-project/stdlib/src/tx/block_header.ts +++ b/yarn-project/stdlib/src/tx/block_header.ts @@ -176,6 +176,12 @@ export class BlockHeader { this._cachedHash = Promise.resolve(new BlockHash(hashed)); } + /** Recomputes the cached hash. Used for testing when header fields are mutated via unfreeze. 
*/ + recomputeHash(): Promise<BlockHash> { + this._cachedHash = undefined; + return this.hash(); + } + static random(overrides: Partial> & Partial> = {}): BlockHeader { return BlockHeader.from({ lastArchive: AppendOnlyTreeSnapshot.random(), diff --git a/yarn-project/validator-client/README.md b/yarn-project/validator-client/README.md index 9c25164b0c7c..c6891987fd6f 100644 --- a/yarn-project/validator-client/README.md +++ b/yarn-project/validator-client/README.md @@ -239,11 +239,11 @@ L1 enforces gas and blob capacity per checkpoint. The node enforces these during Per-block budgets prevent one block from consuming the entire checkpoint budget. -**Proposer**: `SequencerClient.computeBlockGasLimits()` derives budgets at startup as `min(checkpointLimit, ceil(checkpointLimit / maxBlocks * multiplier))`, where `maxBlocks` comes from the timetable and `multiplier` defaults to 2. The multiplier greater than 1 allows early blocks to use more than their even share of the checkpoint budget, since different blocks hit different limit dimensions (L2 gas, DA gas, blob fields) — a strict even split would waste capacity. Operators can override via `SEQ_MAX_L2_BLOCK_GAS` / `SEQ_MAX_DA_BLOCK_GAS` (capped at checkpoint limits). +**Proposer**: `computeBlockLimits()` derives budgets at startup as `min(checkpointLimit, ceil(checkpointLimit / maxBlocks * multiplier))`, where `maxBlocks` comes from the timetable and `multiplier` defaults to 2. The multiplier greater than 1 allows early blocks to use more than their even share of the checkpoint budget, since different blocks hit different limit dimensions (L2 gas, DA gas, blob fields) — a strict even split would waste capacity. Operators can override via `SEQ_MAX_L2_BLOCK_GAS` / `SEQ_MAX_DA_BLOCK_GAS` / `SEQ_MAX_TX_PER_BLOCK` (capped at checkpoint limits). Per-block TX limits follow the same derivation pattern when `SEQ_MAX_TX_PER_CHECKPOINT` is set. **Validator**: Does not enforce per-block gas budgets.
Only checkpoint-level limits are checked, so that proposers can freely distribute capacity across blocks within a checkpoint. -**Checkpoint-level capping**: `CheckpointBuilder.capLimitsByCheckpointBudgets()` always runs before tx processing, capping per-block limits by `checkpointBudget - sum(used by prior blocks)` for all three dimensions. This applies to both proposer and validator paths. +**Checkpoint-level capping**: `CheckpointBuilder.capLimitsByCheckpointBudgets()` always runs before tx processing, capping per-block limits by `checkpointBudget - sum(used by prior blocks)` for all three gas dimensions and for transaction count (when `SEQ_MAX_TX_PER_CHECKPOINT` is set). This applies to both proposer and validator paths. ### Per-transaction enforcement @@ -257,6 +257,8 @@ Per-block budgets prevent one block from consuming the entire checkpoint budget. | --- | --- | --- | | `SEQ_MAX_L2_BLOCK_GAS` | *auto* | Per-block L2 gas. Auto-derived from `rollupManaLimit / maxBlocks * multiplier`. | | `SEQ_MAX_DA_BLOCK_GAS` | *auto* | Per-block DA gas. Auto-derived from checkpoint DA limit / maxBlocks * multiplier. | +| `SEQ_MAX_TX_PER_BLOCK` | *none* | Per-block tx count. If `SEQ_MAX_TX_PER_CHECKPOINT` is set and per-block is not, derived as `ceil(checkpointLimit / maxBlocks * multiplier)`. | +| `SEQ_MAX_TX_PER_CHECKPOINT` | *none* | Total txs across all blocks in a checkpoint. When set, per-block tx limit is derived from it (unless explicitly overridden) and checkpoint-level capping is enforced. | | `SEQ_GAS_PER_BLOCK_ALLOCATION_MULTIPLIER` | 2 | Multiplier for per-block budget computation. 
| ## Testing Patterns diff --git a/yarn-project/validator-client/src/checkpoint_builder.test.ts b/yarn-project/validator-client/src/checkpoint_builder.test.ts index 89e65bc58763..0d9cf8ae6959 100644 --- a/yarn-project/validator-client/src/checkpoint_builder.test.ts +++ b/yarn-project/validator-client/src/checkpoint_builder.test.ts @@ -345,5 +345,75 @@ describe('CheckpointBuilder', () => { expect(afterTwoBlocks.maxBlobFields).toBeLessThan(afterOneBlock.maxBlobFields!); expect(afterOneBlock.maxBlobFields! - afterTwoBlocks.maxBlobFields!).toBe(block2BlobFieldCount); }); + + it('caps transaction count by remaining checkpoint tx budget', () => { + setupBuilder({ maxTxsPerCheckpoint: 20 }); + + // Prior block with 3 txs (each with 10 blob fields) + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 0, txBlobFields: [10, 10, 10], blockBlobFieldCount: 40 }), + ]); + + const opts: PublicProcessorLimits = { maxTransactions: 15 }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining txs = 20 - 3 = 17. Per-block = 15. Capped to min(15, 17) = 15. + expect(capped.maxTransactions).toBe(15); + }); + + it('caps transaction count when remaining budget is smaller than per-block limit', () => { + setupBuilder({ maxTxsPerCheckpoint: 10 }); + + // Two prior blocks with 4 txs each = 8 total + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 0, txBlobFields: [10, 10, 10, 10], blockBlobFieldCount: 50 }), + createMockBlock({ manaUsed: 0, txBlobFields: [10, 10, 10, 10], blockBlobFieldCount: 50 }), + ]); + + const opts: PublicProcessorLimits = { maxTransactions: 5 }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining txs = 10 - 8 = 2. Per-block = 5. Capped to min(5, 2) = 2. 
+ expect(capped.maxTransactions).toBe(2); + }); + + it('sets transaction count from remaining budget when caller does not provide it', () => { + setupBuilder({ maxTxsPerCheckpoint: 15 }); + + // Prior block with 5 txs + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 0, txBlobFields: [10, 10, 10, 10, 10], blockBlobFieldCount: 60 }), + ]); + + const opts: PublicProcessorLimits = {}; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining txs = 15 - 5 = 10 + expect(capped.maxTransactions).toBe(10); + }); + + it('does not cap transaction count when maxTxsPerCheckpoint is not set', () => { + setupBuilder(); // no maxTxsPerCheckpoint + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); + + const opts: PublicProcessorLimits = { maxTransactions: 99 }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Passthrough: maxTransactions = 99 + expect(capped.maxTransactions).toBe(99); + }); + + it('does not cap transaction count when maxTxsPerCheckpoint is not set and caller does not provide it', () => { + setupBuilder(); // no maxTxsPerCheckpoint + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); + + const opts: PublicProcessorLimits = {}; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Neither config nor caller sets it, so it remains undefined + expect(capped.maxTransactions).toBeUndefined(); + }); }); }); diff --git a/yarn-project/validator-client/src/checkpoint_builder.ts b/yarn-project/validator-client/src/checkpoint_builder.ts index c73abaee87c0..2bd19ff4ab8c 100644 --- a/yarn-project/validator-client/src/checkpoint_builder.ts +++ b/yarn-project/validator-client/src/checkpoint_builder.ts @@ -158,7 +158,7 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { */ protected capLimitsByCheckpointBudgets( opts: PublicProcessorLimits, - ): Pick { + ): Pick { const existingBlocks 
= this.checkpointBuilder.getBlocks(); // Remaining L2 gas (mana) @@ -188,9 +188,21 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { const cappedBlobFields = opts.maxBlobFields !== undefined ? Math.min(opts.maxBlobFields, maxBlobFieldsForTxs) : maxBlobFieldsForTxs; + // Cap transaction count by remaining checkpoint tx budget + let cappedMaxTransactions: number | undefined; + if (this.config.maxTxsPerCheckpoint !== undefined) { + const usedTxs = sum(existingBlocks.map(b => b.body.txEffects.length)); + const remainingTxs = Math.max(0, this.config.maxTxsPerCheckpoint - usedTxs); + cappedMaxTransactions = + opts.maxTransactions !== undefined ? Math.min(opts.maxTransactions, remainingTxs) : remainingTxs; + } else { + cappedMaxTransactions = opts.maxTransactions; + } + return { maxBlockGas: new Gas(cappedDAGas, cappedL2Gas), maxBlobFields: cappedBlobFields, + maxTransactions: cappedMaxTransactions, }; } diff --git a/yarn-project/validator-client/src/validator.ha.integration.test.ts b/yarn-project/validator-client/src/validator.ha.integration.test.ts index cba52926ec05..80c7bd532974 100644 --- a/yarn-project/validator-client/src/validator.ha.integration.test.ts +++ b/yarn-project/validator-client/src/validator.ha.integration.test.ts @@ -193,6 +193,7 @@ describe('ValidatorClient HA Integration', () => { const metrics = new ValidatorMetrics(getTelemetryClient()); const blockProposalValidator = new BlockProposalValidator(epochCache, { txsPermitted: true, + maxTxsPerBlock: undefined, }); const blockProposalHandler = new BlockProposalHandler( checkpointsBuilder,