diff --git a/yarn-project/archiver/src/archiver.ts b/yarn-project/archiver/src/archiver.ts index de82a0482186..28c0cfa720ab 100644 --- a/yarn-project/archiver/src/archiver.ts +++ b/yarn-project/archiver/src/archiver.ts @@ -120,7 +120,11 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra }, private readonly blobClient: BlobClientInterface, instrumentation: ArchiverInstrumentation, - protected override readonly l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr }, + protected override readonly l1Constants: L1RollupConstants & { + l1StartBlockHash: Buffer32; + genesisArchiveRoot: Fr; + rollupManaLimit?: number; + }, synchronizer: ArchiverL1Synchronizer, events: ArchiverEmitter, l2TipsCache?: L2TipsCache, @@ -133,7 +137,9 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra this.synchronizer = synchronizer; this.events = events; this.l2TipsCache = l2TipsCache ?? new L2TipsCache(this.dataStore.blockStore); - this.updater = new ArchiverDataStoreUpdater(this.dataStore, this.l2TipsCache); + this.updater = new ArchiverDataStoreUpdater(this.dataStore, this.l2TipsCache, { + rollupManaLimit: l1Constants.rollupManaLimit, + }); // Running promise starts with a small interval inbetween runs, so all iterations needed for the initial sync // are done as fast as possible. This then gets updated once the initial sync completes. 
diff --git a/yarn-project/archiver/src/factory.ts b/yarn-project/archiver/src/factory.ts index ca4d60f8a780..f7f2d46b44db 100644 --- a/yarn-project/archiver/src/factory.ts +++ b/yarn-project/archiver/src/factory.ts @@ -85,6 +85,7 @@ export async function createArchiver( genesisArchiveRoot, slashingProposerAddress, targetCommitteeSize, + rollupManaLimit, ] = await Promise.all([ rollup.getL1StartBlock(), rollup.getL1GenesisTime(), @@ -92,6 +93,7 @@ export async function createArchiver( rollup.getGenesisArchiveTreeRoot(), rollup.getSlashingProposerAddress(), rollup.getTargetCommitteeSize(), + rollup.getManaLimit(), ] as const); const l1StartBlockHash = await publicClient @@ -110,6 +112,7 @@ export async function createArchiver( proofSubmissionEpochs: Number(proofSubmissionEpochs), targetCommitteeSize, genesisArchiveRoot: Fr.fromString(genesisArchiveRoot.toString()), + rollupManaLimit: Number(rollupManaLimit), }; const archiverConfig = merge( diff --git a/yarn-project/archiver/src/modules/data_store_updater.test.ts b/yarn-project/archiver/src/modules/data_store_updater.test.ts index e261b76faab9..94721e4c22ea 100644 --- a/yarn-project/archiver/src/modules/data_store_updater.test.ts +++ b/yarn-project/archiver/src/modules/data_store_updater.test.ts @@ -5,9 +5,7 @@ import { ContractClassPublishedEvent } from '@aztec/protocol-contracts/class-reg import { ContractInstancePublishedEvent } from '@aztec/protocol-contracts/instance-registry'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { L2Block } from '@aztec/stdlib/block'; -import { Checkpoint } from '@aztec/stdlib/checkpoint'; import { ContractClassLog, PrivateLog } from '@aztec/stdlib/logs'; -import { CheckpointHeader } from '@aztec/stdlib/rollup'; import '@aztec/stdlib/testing/jest'; import { readFileSync } from 'fs'; @@ -15,7 +13,7 @@ import { dirname, resolve } from 'path'; import { fileURLToPath } from 'url'; import { KVArchiverDataStore } from '../store/kv_archiver_store.js'; -import { 
makePublishedCheckpoint } from '../test/mock_structs.js'; +import { makeCheckpoint, makePublishedCheckpoint } from '../test/mock_structs.js'; import { ArchiverDataStoreUpdater } from './data_store_updater.js'; /** Loads the sample ContractClassPublished event payload from protocol-contracts fixtures. */ @@ -110,12 +108,7 @@ describe('ArchiverDataStoreUpdater', () => { // Make sure it has a different archive root (which it will by default from random) expect(conflictingBlock.archive.root.equals(localBlock.archive.root)).toBe(false); - const checkpointWithConflict = new Checkpoint( - conflictingBlock.archive, - CheckpointHeader.random({ slotNumber: SlotNumber(100) }), - [conflictingBlock], - CheckpointNumber(1), - ); + const checkpointWithConflict = makeCheckpoint([conflictingBlock]); const publishedCheckpoint = makePublishedCheckpoint(checkpointWithConflict, 10); // This should detect the conflict and prune the local block @@ -135,8 +128,7 @@ describe('ArchiverDataStoreUpdater', () => { block.body.txEffects[0].contractClassLogs = [contractClassLog]; block.body.txEffects[0].privateLogs = [PrivateLog.fromBuffer(getSampleContractInstancePublishedEventPayload())]; - const checkpoint = new Checkpoint(block.archive, CheckpointHeader.random(), [block], CheckpointNumber(1)); - const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10); + const publishedCheckpoint = makePublishedCheckpoint(makeCheckpoint([block]), 10); await updater.addCheckpoints([publishedCheckpoint]); @@ -166,8 +158,7 @@ describe('ArchiverDataStoreUpdater', () => { await updater.addProposedBlocks([block]); // Create checkpoint with the SAME block (same archive root) - const checkpoint = new Checkpoint(block.archive, CheckpointHeader.random(), [block], CheckpointNumber(1)); - const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10); + const publishedCheckpoint = makePublishedCheckpoint(makeCheckpoint([block]), 10); await updater.addCheckpoints([publishedCheckpoint]); @@ -196,13 +187,7 
@@ describe('ArchiverDataStoreUpdater', () => { }); expect(checkpointBlock.archive.root.equals(localBlock.archive.root)).toBe(false); - const checkpoint = new Checkpoint( - checkpointBlock.archive, - CheckpointHeader.random({ slotNumber: SlotNumber(100) }), - [checkpointBlock], - CheckpointNumber(1), - ); - await updater.addCheckpoints([makePublishedCheckpoint(checkpoint, 10)]); + await updater.addCheckpoints([makePublishedCheckpoint(makeCheckpoint([checkpointBlock]), 10)]); // Verify checkpoint block is stored const storedBlock = await store.getBlock(BlockNumber(1)); diff --git a/yarn-project/archiver/src/modules/data_store_updater.ts b/yarn-project/archiver/src/modules/data_store_updater.ts index dd2e6becd57a..83864240f01d 100644 --- a/yarn-project/archiver/src/modules/data_store_updater.ts +++ b/yarn-project/archiver/src/modules/data_store_updater.ts @@ -11,7 +11,7 @@ import { ContractInstanceUpdatedEvent, } from '@aztec/protocol-contracts/instance-registry'; import type { L2Block, ValidateCheckpointResult } from '@aztec/stdlib/block'; -import type { PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; +import { type PublishedCheckpoint, validateCheckpoint } from '@aztec/stdlib/checkpoint'; import { type ExecutablePrivateFunctionWithMembershipProof, type UtilityFunctionWithMembershipProof, @@ -48,6 +48,7 @@ export class ArchiverDataStoreUpdater { constructor( private store: KVArchiverDataStore, private l2TipsCache?: L2TipsCache, + private opts: { rollupManaLimit?: number } = {}, ) {} /** @@ -97,6 +98,10 @@ export class ArchiverDataStoreUpdater { checkpoints: PublishedCheckpoint[], pendingChainValidationStatus?: ValidateCheckpointResult, ): Promise { + for (const checkpoint of checkpoints) { + validateCheckpoint(checkpoint.checkpoint, { rollupManaLimit: this.opts?.rollupManaLimit }); + } + const result = await this.store.transactionAsync(async () => { // Before adding checkpoints, check for conflicts with local blocks if any const { prunedBlocks, 
lastAlreadyInsertedBlockNumber } = await this.pruneMismatchingLocalBlocks(checkpoints); diff --git a/yarn-project/archiver/src/modules/l1_synchronizer.ts b/yarn-project/archiver/src/modules/l1_synchronizer.ts index ae4bca9dc898..5f75863f98db 100644 --- a/yarn-project/archiver/src/modules/l1_synchronizer.ts +++ b/yarn-project/archiver/src/modules/l1_synchronizer.ts @@ -69,13 +69,19 @@ export class ArchiverL1Synchronizer implements Traceable { private readonly epochCache: EpochCache, private readonly dateProvider: DateProvider, private readonly instrumentation: ArchiverInstrumentation, - private readonly l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr }, + private readonly l1Constants: L1RollupConstants & { + l1StartBlockHash: Buffer32; + genesisArchiveRoot: Fr; + rollupManaLimit?: number; + }, private readonly events: ArchiverEmitter, tracer: Tracer, l2TipsCache?: L2TipsCache, private readonly log: Logger = createLogger('archiver:l1-sync'), ) { - this.updater = new ArchiverDataStoreUpdater(this.store, l2TipsCache); + this.updater = new ArchiverDataStoreUpdater(this.store, l2TipsCache, { + rollupManaLimit: l1Constants.rollupManaLimit, + }); this.tracer = tracer; } diff --git a/yarn-project/archiver/src/test/mock_structs.ts b/yarn-project/archiver/src/test/mock_structs.ts index 974141601f5b..0888d717218c 100644 --- a/yarn-project/archiver/src/test/mock_structs.ts +++ b/yarn-project/archiver/src/test/mock_structs.ts @@ -127,6 +127,25 @@ export function makeL1PublishedData(l1BlockNumber: number): L1PublishedData { return new L1PublishedData(BigInt(l1BlockNumber), BigInt(l1BlockNumber * 1000), makeBlockHash(l1BlockNumber)); } +/** Creates a Checkpoint from a list of blocks with a header that matches the blocks' structure. 
*/ +export function makeCheckpoint(blocks: L2Block[], checkpointNumber = CheckpointNumber(1)): Checkpoint { + const firstBlock = blocks[0]; + const { slotNumber, timestamp, coinbase, feeRecipient, gasFees } = firstBlock.header.globalVariables; + return new Checkpoint( + blocks.at(-1)!.archive, + CheckpointHeader.random({ + lastArchiveRoot: firstBlock.header.lastArchive.root, + slotNumber, + timestamp, + coinbase, + feeRecipient, + gasFees, + }), + blocks, + checkpointNumber, + ); +} + /** Wraps a Checkpoint with L1 published data and random attestations. */ export function makePublishedCheckpoint( checkpoint: Checkpoint, @@ -301,11 +320,6 @@ export async function makeCheckpointWithLogs( return txEffect; }); - const checkpoint = new Checkpoint( - AppendOnlyTreeSnapshot.random(), - CheckpointHeader.random(), - [block], - CheckpointNumber.fromBlockNumber(BlockNumber(blockNumber)), - ); + const checkpoint = makeCheckpoint([block], CheckpointNumber.fromBlockNumber(BlockNumber(blockNumber))); return makePublishedCheckpoint(checkpoint, blockNumber); } diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 6cbd2d15ecae..bcdac93e877e 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -203,6 +203,7 @@ export type EnvVar = | 'SENTINEL_HISTORIC_PROVEN_PERFORMANCE_LENGTH_IN_EPOCHS' | 'SEQ_MAX_BLOCK_SIZE_IN_BYTES' | 'SEQ_MAX_TX_PER_BLOCK' + | 'SEQ_MAX_TX_PER_CHECKPOINT' | 'SEQ_MIN_TX_PER_BLOCK' | 'SEQ_PUBLISH_TXS_WITH_PROPOSALS' | 'SEQ_MAX_DA_BLOCK_GAS' diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts index 9bc2e2888864..ea093cd2ab2e 100644 --- a/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts +++ 
b/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.test.ts @@ -61,4 +61,70 @@ describe('CheckpointProposalValidator', () => { getTxs: () => [], epochCacheMock: () => mock(), }); + + describe('maxTxsPerBlock validation', () => { + const currentSlot = SlotNumber(100); + const nextSlot = SlotNumber(101); + let epochCache: ReturnType>; + + function setupEpochCache(proposerAddress: EthAddress) { + epochCache = mock(); + epochCache.getCurrentAndNextSlot.mockReturnValue({ currentSlot, nextSlot }); + epochCache.getProposerAttesterAddressInSlot.mockResolvedValue(proposerAddress); + } + + it('rejects checkpoint proposal when last block txHashes exceed maxTxsPerBlock', async () => { + const signer = Secp256k1Signer.random(); + setupEpochCache(signer.address); + const validator = new CheckpointProposalValidator(epochCache, { txsPermitted: true, maxTxsPerBlock: 2 }); + + const header = makeCheckpointHeader(0, { slotNumber: currentSlot }); + const proposal = await makeCheckpointProposalAdapter({ + blockHeader: header, + lastBlockHeader: header, + signer, + txHashes: Array.from({ length: 3 }, () => TxHash.random()), + }); + + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'reject', severity: expect.anything() }); + }); + + it('accepts checkpoint proposal when last block txHashes are within maxTxsPerBlock', async () => { + const signer = Secp256k1Signer.random(); + setupEpochCache(signer.address); + const validator = new CheckpointProposalValidator(epochCache, { txsPermitted: true, maxTxsPerBlock: 5 }); + + const header = makeCheckpointHeader(0, { slotNumber: currentSlot }); + const proposal = await makeCheckpointProposalAdapter({ + blockHeader: header, + lastBlockHeader: header, + signer, + txHashes: Array.from({ length: 3 }, () => TxHash.random()), + }); + + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'accept' }); + }); + + it('skips maxTxsPerBlock check when not 
configured', async () => { + const signer = Secp256k1Signer.random(); + setupEpochCache(signer.address); + const validator = new CheckpointProposalValidator(epochCache, { + txsPermitted: true, + maxTxsPerBlock: undefined, + }); + + const header = makeCheckpointHeader(0, { slotNumber: currentSlot }); + const proposal = await makeCheckpointProposalAdapter({ + blockHeader: header, + lastBlockHeader: header, + signer, + txHashes: Array.from({ length: 100 }, () => TxHash.random()), + }); + + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'accept' }); + }); + }); }); diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator_test_suite.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator_test_suite.ts index e58a007a3de7..ec12ec3442f6 100644 --- a/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator_test_suite.ts +++ b/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator_test_suite.ts @@ -58,7 +58,7 @@ export function sharedProposalValidatorTests { epochCache = epochCacheMock(); - validator = validatorFactory(epochCache, { txsPermitted: true }); + validator = validatorFactory(epochCache, { txsPermitted: true, maxTxsPerBlock: undefined }); epochCache.getCurrentAndNextSlot.mockReturnValue({ currentSlot: currentSlot, nextSlot: nextSlot, @@ -231,7 +231,10 @@ export function sharedProposalValidatorTests { it('returns mid tolerance error if txs not permitted and proposal contains txHashes', async () => { const currentProposer = getSigner(); - const validatorWithTxsDisabled = validatorFactory(epochCache, { txsPermitted: false }); + const validatorWithTxsDisabled = validatorFactory(epochCache, { + txsPermitted: false, + maxTxsPerBlock: undefined, + }); const header = makeHeader(1, 100, 100); const mockProposal = await makeProposal({ blockHeader: header, @@ -247,7 +250,10 @@ export function sharedProposalValidatorTests { const currentProposer = 
getSigner(); - const validatorWithTxsDisabled = validatorFactory(epochCache, { txsPermitted: false }); + const validatorWithTxsDisabled = validatorFactory(epochCache, { + txsPermitted: false, + maxTxsPerBlock: undefined, + }); const header = makeHeader(1, 100, 100); const mockProposal = await makeProposal({ blockHeader: header, diff --git a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts index 8da82a7d195b..783ce04e76dc 100644 --- a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts +++ b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts @@ -222,14 +222,12 @@ export class LibP2PService extends WithTracer implements P2PService { this.protocolVersion, ); - this.blockProposalValidator = new BlockProposalValidator(epochCache, { + const proposalValidatorOpts = { txsPermitted: !config.disableTransactions, maxTxsPerBlock: config.maxTxsPerBlock, - }); - this.checkpointProposalValidator = new CheckpointProposalValidator(epochCache, { - txsPermitted: !config.disableTransactions, - maxTxsPerBlock: config.maxTxsPerBlock, - }); + }; + this.blockProposalValidator = new BlockProposalValidator(epochCache, proposalValidatorOpts); + this.checkpointProposalValidator = new CheckpointProposalValidator(epochCache, proposalValidatorOpts); this.checkpointAttestationValidator = config.fishermanMode ? 
new FishermanAttestationValidator(epochCache, mempools.attestationPool, telemetry) : new CheckpointAttestationValidator(epochCache); diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.test.ts b/yarn-project/sequencer-client/src/client/sequencer-client.test.ts new file mode 100644 index 000000000000..e325cefca47d --- /dev/null +++ b/yarn-project/sequencer-client/src/client/sequencer-client.test.ts @@ -0,0 +1,110 @@ +import { MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT } from '@aztec/constants'; +import { createLogger } from '@aztec/foundation/log'; + +import type { SequencerClientConfig } from '../config.js'; +import { computeBlockLimits } from './sequencer-client.js'; + +describe('computeBlockLimits', () => { + const log = createLogger('test'); + + /** Builds a minimal config with only the fields needed by computeBlockLimits. */ + function makeConfig(overrides: Partial = {}): SequencerClientConfig { + return { + ethereumSlotDuration: 12, + aztecSlotDuration: 72, + attestationPropagationTime: 3, + enforceTimeTable: true, + // No blockDurationMs -> single block mode -> maxNumberOfBlocks = 1 + ...overrides, + } as SequencerClientConfig; + } + + describe('L2 gas', () => { + it('derives maxL2BlockGas from rollupManaLimit when not explicitly set', () => { + const rollupManaLimit = 1_000_000; + // Single block mode (maxNumberOfBlocks=1), default multiplier=2: + // min(1_000_000, ceil(1_000_000 / 1 * 2)) = min(1_000_000, 2_000_000) = 1_000_000 + const result = computeBlockLimits(makeConfig(), rollupManaLimit, 12, log); + expect(result.maxL2BlockGas).toBe(rollupManaLimit); + }); + + it('uses explicit maxL2BlockGas when within rollupManaLimit', () => { + const result = computeBlockLimits(makeConfig({ maxL2BlockGas: 500_000 }), 1_000_000, 12, log); + expect(result.maxL2BlockGas).toBe(500_000); + }); + + it('caps explicit maxL2BlockGas at rollupManaLimit', () => { + const result = computeBlockLimits(makeConfig({ maxL2BlockGas: 2_000_000 }), 1_000_000, 12, log); + 
expect(result.maxL2BlockGas).toBe(1_000_000); + }); + }); + + describe('DA gas', () => { + const daLimit = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; + + it('derives maxDABlockGas from DA checkpoint limit when not explicitly set', () => { + // Single block mode (maxNumberOfBlocks=1), default multiplier=2: + // min(daLimit, ceil(daLimit / 1 * 2)) = min(daLimit, daLimit * 2) = daLimit + const result = computeBlockLimits(makeConfig(), 1_000_000, 12, log); + expect(result.maxDABlockGas).toBe(daLimit); + }); + + it('uses explicit maxDABlockGas when within DA checkpoint limit', () => { + const explicit = Math.floor(daLimit / 2); + const result = computeBlockLimits(makeConfig({ maxDABlockGas: explicit }), 1_000_000, 12, log); + expect(result.maxDABlockGas).toBe(explicit); + }); + + it('caps explicit maxDABlockGas at DA checkpoint limit', () => { + const result = computeBlockLimits(makeConfig({ maxDABlockGas: daLimit + 100_000 }), 1_000_000, 12, log); + expect(result.maxDABlockGas).toBe(daLimit); + }); + }); + + describe('TX count', () => { + it('uses explicit maxTxsPerBlock when set', () => { + const result = computeBlockLimits(makeConfig({ maxTxsPerBlock: 10 }), 1_000_000, 12, log); + expect(result.maxTxsPerBlock).toBe(10); + }); + + it('caps maxTxsPerBlock at maxTxsPerCheckpoint', () => { + const result = computeBlockLimits( + makeConfig({ maxTxsPerBlock: 50, maxTxsPerCheckpoint: 30 }), + 1_000_000, + 12, + log, + ); + expect(result.maxTxsPerBlock).toBe(30); + }); + + it('derives maxTxsPerBlock from maxTxsPerCheckpoint when per-block not set', () => { + // Multi-block mode with maxNumberOfBlocks=5, multiplier=2: + // min(100, ceil(100 / 5 * 2)) = min(100, 40) = 40 + const config = makeConfig({ + maxTxsPerCheckpoint: 100, + blockDurationMs: 8000, + }); + const result = computeBlockLimits(config, 1_000_000, 12, log); + expect(result.maxTxsPerBlock).toBe(40); + }); + }); + + describe('multi-block mode', () => { + it('distributes budget across blocks in multi-block mode', () => 
{ + // With blockDurationMs=8000, aztecSlotDuration=72, ethereumSlotDuration=12, + // attestationPropagationTime=3, l1PublishingTime=12: + // checkpointFinalizationTime = 1 + 3*2 + 12 = 19 + // timeReservedAtEnd = 8 + 19 = 27 + // timeAvailableForBlocks = 72 - 1 - 27 = 44 + // maxNumberOfBlocks = floor(44 / 8) = 5 + // With multiplier=2 and rollupManaLimit=1_000_000: + // maxL2BlockGas = min(1_000_000, ceil(1_000_000 / 5 * 2)) = min(1_000_000, 400_000) = 400_000 + const config = makeConfig({ blockDurationMs: 8000 }); + const result = computeBlockLimits(config, 1_000_000, 12, log); + expect(result.maxL2BlockGas).toBe(400_000); + + const daLimit = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; + expect(result.maxDABlockGas).toBe(Math.min(daLimit, Math.ceil((daLimit / 5) * 2))); + }); + }); +}); diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index c55521d7b233..39b0fd6b84fa 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -1,4 +1,5 @@ import type { BlobClientInterface } from '@aztec/blob-client/client'; +import { MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT } from '@aztec/constants'; import { EpochCache } from '@aztec/epoch-cache'; import { isAnvilTestChain } from '@aztec/ethereum/chain'; import { getPublicClient } from '@aztec/ethereum/client'; @@ -18,10 +19,11 @@ import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; import { L1Metrics, type TelemetryClient } from '@aztec/telemetry-client'; import { FullNodeCheckpointsBuilder, NodeKeystoreAdapter, type ValidatorClient } from '@aztec/validator-client'; -import { type SequencerClientConfig, getPublisherConfigFromSequencerConfig } from '../config.js'; +import { DefaultSequencerConfig, type SequencerClientConfig, getPublisherConfigFromSequencerConfig } from '../config.js'; import { GlobalVariableBuilder } from 
'../global_variable_builder/index.js'; import { SequencerPublisherFactory } from '../publisher/sequencer-publisher-factory.js'; import { Sequencer, type SequencerConfig } from '../sequencer/index.js'; +import { SequencerTimetable } from '../sequencer/timetable.js'; /** * Encapsulates the full sequencer and publisher. @@ -137,17 +139,6 @@ export class SequencerClient { }); const ethereumSlotDuration = config.ethereumSlotDuration; - const l1Constants = { l1GenesisTime, slotDuration: Number(slotDuration), ethereumSlotDuration }; - - const globalsBuilder = new GlobalVariableBuilder({ ...config, ...l1Constants, rollupVersion }); - - let sequencerManaLimit = config.maxL2BlockGas ?? rollupManaLimit; - if (sequencerManaLimit > rollupManaLimit) { - log.warn( - `Provided maxL2BlockGas ${sequencerManaLimit} is greater than the max allowed by L1. Setting limit to ${rollupManaLimit}.`, - ); - sequencerManaLimit = rollupManaLimit; - } // When running in anvil, assume we can post a tx up until one second before the end of an L1 slot. // Otherwise, we need the full L1 slot duration for publishing to ensure inclusion. @@ -157,6 +148,17 @@ export class SequencerClient { const l1PublishingTimeBasedOnChain = isAnvilTestChain(config.l1ChainId) ? 1 : ethereumSlotDuration; const l1PublishingTime = config.l1PublishingTime ?? 
l1PublishingTimeBasedOnChain; + const { maxL2BlockGas, maxDABlockGas, maxTxsPerBlock } = computeBlockLimits( + config, + rollupManaLimit, + l1PublishingTime, + log, + ); + + const l1Constants = { l1GenesisTime, slotDuration: Number(slotDuration), ethereumSlotDuration, rollupManaLimit }; + + const globalsBuilder = new GlobalVariableBuilder({ ...config, ...l1Constants, rollupVersion }); + const sequencer = new Sequencer( publisherFactory, validatorClient, @@ -171,7 +173,7 @@ export class SequencerClient { deps.dateProvider, epochCache, rollupContract, - { ...config, l1PublishingTime, maxL2BlockGas: sequencerManaLimit }, + { ...config, l1PublishingTime, maxL2BlockGas, maxDABlockGas, maxTxsPerBlock }, telemetryClient, log, ); @@ -234,3 +236,90 @@ export class SequencerClient { return this.sequencer.maxL2BlockGas; } } + +/** + * Computes per-block L2 gas, DA gas, and TX count budgets based on the L1 rollup limits and the timetable. + * If the user explicitly set a limit, it is capped at the corresponding checkpoint limit. + * Otherwise, derives it as (checkpointLimit / maxBlocks) * multiplier, capped at the checkpoint limit. + */ +export function computeBlockLimits( + config: SequencerClientConfig, + rollupManaLimit: number, + l1PublishingTime: number, + log: ReturnType, +): { maxL2BlockGas: number; maxDABlockGas: number; maxTxsPerBlock: number } { + const maxNumberOfBlocks = new SequencerTimetable({ + ethereumSlotDuration: config.ethereumSlotDuration, + aztecSlotDuration: config.aztecSlotDuration, + l1PublishingTime, + p2pPropagationTime: config.attestationPropagationTime, + blockDurationMs: config.blockDurationMs, + enforce: config.enforceTimeTable ?? DefaultSequencerConfig.enforceTimeTable, + }).maxNumberOfBlocks; + + const multiplier = config.gasPerBlockAllocationMultiplier ?? 
DefaultSequencerConfig.gasPerBlockAllocationMultiplier; + + // Compute maxL2BlockGas + let maxL2BlockGas: number; + if (config.maxL2BlockGas !== undefined) { + if (config.maxL2BlockGas > rollupManaLimit) { + log.warn( + `Provided MAX_L2_BLOCK_GAS ${config.maxL2BlockGas} exceeds L1 rollup mana limit ${rollupManaLimit} (capping)`, + ); + maxL2BlockGas = rollupManaLimit; + } else { + maxL2BlockGas = config.maxL2BlockGas; + } + } else { + maxL2BlockGas = Math.min(rollupManaLimit, Math.ceil((rollupManaLimit / maxNumberOfBlocks) * multiplier)); + } + + // Compute maxDABlockGas + const daCheckpointLimit = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; + let maxDABlockGas: number; + if (config.maxDABlockGas !== undefined) { + if (config.maxDABlockGas > daCheckpointLimit) { + log.warn( + `Provided MAX_DA_BLOCK_GAS ${config.maxDABlockGas} exceeds DA checkpoint limit ${daCheckpointLimit} (capping)`, + ); + maxDABlockGas = daCheckpointLimit; + } else { + maxDABlockGas = config.maxDABlockGas; + } + } else { + maxDABlockGas = Math.min(daCheckpointLimit, Math.ceil((daCheckpointLimit / maxNumberOfBlocks) * multiplier)); + } + + // Compute maxTxsPerBlock + const defaultMaxTxsPerBlock = 32; + let maxTxsPerBlock: number; + if (config.maxTxsPerBlock !== undefined) { + if (config.maxTxsPerCheckpoint !== undefined && config.maxTxsPerBlock > config.maxTxsPerCheckpoint) { + log.warn( + `Provided MAX_TX_PER_BLOCK ${config.maxTxsPerBlock} exceeds MAX_TX_PER_CHECKPOINT ${config.maxTxsPerCheckpoint} (capping)`, + ); + maxTxsPerBlock = config.maxTxsPerCheckpoint; + } else { + maxTxsPerBlock = config.maxTxsPerBlock; + } + } else if (config.maxTxsPerCheckpoint !== undefined) { + maxTxsPerBlock = Math.min( + config.maxTxsPerCheckpoint, + Math.ceil((config.maxTxsPerCheckpoint / maxNumberOfBlocks) * multiplier), + ); + } else { + maxTxsPerBlock = defaultMaxTxsPerBlock; + } + + log.info(`Computed block limits L2=${maxL2BlockGas} DA=${maxDABlockGas} maxTxs=${maxTxsPerBlock}`, { + maxL2BlockGas, + 
maxDABlockGas, + maxTxsPerBlock, + rollupManaLimit, + daCheckpointLimit, + maxNumberOfBlocks, + multiplier, + }); + + return { maxL2BlockGas, maxDABlockGas, maxTxsPerBlock }; +} diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index d6aa5d93f9f5..dc28e6221341 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -13,7 +13,6 @@ import { type P2PConfig, p2pConfigMappings } from '@aztec/p2p/config'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { type ChainConfig, - DEFAULT_MAX_TXS_PER_BLOCK, type SequencerConfig, chainConfigMappings, sharedSequencerConfigMappings, @@ -38,7 +37,6 @@ export type { SequencerConfig }; */ export const DefaultSequencerConfig: ResolvedSequencerConfig = { sequencerPollingIntervalMS: 500, - maxTxsPerBlock: DEFAULT_MAX_TXS_PER_BLOCK, minTxsPerBlock: 1, buildCheckpointIfEmpty: false, publishTxsWithProposals: false, @@ -59,6 +57,7 @@ export const DefaultSequencerConfig: ResolvedSequencerConfig = { shuffleAttestationOrdering: false, skipPushProposedBlocksToArchiver: false, skipPublishingCheckpointsPercent: 0, + gasPerBlockAllocationMultiplier: 2, }; /** @@ -80,6 +79,11 @@ export const sequencerConfigMappings: ConfigMappingsType = { description: 'The number of ms to wait between polling for checking to build on the next slot.', ...numberConfigHelper(DefaultSequencerConfig.sequencerPollingIntervalMS), }, + maxTxsPerCheckpoint: { + env: 'SEQ_MAX_TX_PER_CHECKPOINT', + description: 'The maximum number of txs across all blocks in a checkpoint.', + parseEnv: (val: string) => (val ? 
parseInt(val, 10) : undefined), + }, minTxsPerBlock: { env: 'SEQ_MIN_TX_PER_BLOCK', description: 'The minimum number of txs to include in a block.', @@ -202,6 +206,11 @@ export const sequencerConfigMappings: ConfigMappingsType = { description: 'Shuffle attestation ordering to create invalid ordering (for testing only)', ...booleanConfigHelper(DefaultSequencerConfig.shuffleAttestationOrdering), }, + gasPerBlockAllocationMultiplier: { + env: 'SEQ_GAS_PER_BLOCK_ALLOCATION_MULTIPLIER', + description: 'Multiplier for per-block budget computation (default 2).', + parseEnv: (val: string) => (val ? parseFloat(val) : undefined), + }, ...sharedSequencerConfigMappings, buildCheckpointIfEmpty: { env: 'SEQ_BUILD_CHECKPOINT_IF_EMPTY', diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts index b9e960fb7c1f..f3bf9eacbbc6 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts @@ -32,7 +32,7 @@ import { type L2BlockSource, MaliciousCommitteeAttestationsAndSigners, } from '@aztec/stdlib/block'; -import type { Checkpoint } from '@aztec/stdlib/checkpoint'; +import { type Checkpoint, validateCheckpoint } from '@aztec/stdlib/checkpoint'; import { getSlotStartBuildTimestamp } from '@aztec/stdlib/epoch-helpers'; import { Gas } from '@aztec/stdlib/gas'; import { @@ -267,6 +267,22 @@ export class CheckpointProposalJob implements Traceable { this.setStateFn(SequencerState.ASSEMBLING_CHECKPOINT, this.slot); const checkpoint = await checkpointBuilder.completeCheckpoint(); + // Final validation round for the checkpoint before we propose it, just for safety + try { + validateCheckpoint(checkpoint, { + rollupManaLimit: this.l1Constants.rollupManaLimit, + maxL2BlockGas: this.config.maxL2BlockGas, + maxDABlockGas: this.config.maxDABlockGas, + maxTxsPerBlock: this.config.maxTxsPerBlock, + 
maxTxsPerCheckpoint: this.config.maxTxsPerCheckpoint, + }); + } catch (err) { + this.log.error(`Built an invalid checkpoint at slot ${this.slot} (skipping proposal)`, err, { + checkpoint: checkpoint.header.toInspect(), + }); + return undefined; + } + // Record checkpoint-level build metrics this.metrics.recordCheckpointBuild( checkpointBuildTimer.ms(), @@ -572,6 +588,8 @@ export class CheckpointProposalJob implements Traceable { const blockEndOverhead = getNumBlockEndBlobFields(indexWithinCheckpoint === 0); const maxBlobFieldsForTxs = remainingBlobFields - blockEndOverhead; + // Per-block limits derived at startup by computeBlockLimits(), further capped + // by remaining checkpoint-level budgets inside CheckpointBuilder before each block is built. const blockBuilderOptions: PublicProcessorLimits = { maxTransactions: this.config.maxTxsPerBlock, maxBlockSize: this.config.maxBlockSizeInBytes, diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index cb625f07002d..7c45c8a60384 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -871,7 +871,7 @@ describe('sequencer', () => { sequencer.updateConfig({ enforceTimeTable: true, maxTxsPerBlock: 4, blockDurationMs: 500 }); const txs = await timesParallel(8, i => makeTx(i * 0x10000)); - block = await makeBlock(txs); + block = await makeBlock(txs.slice(0, 4)); TestUtils.mockPendingTxs(p2p, txs); await sequencer.work(); diff --git a/yarn-project/sequencer-client/src/sequencer/types.ts b/yarn-project/sequencer-client/src/sequencer/types.ts index ef4cebf699c2..312c9613cce5 100644 --- a/yarn-project/sequencer-client/src/sequencer/types.ts +++ b/yarn-project/sequencer-client/src/sequencer/types.ts @@ -3,4 +3,7 @@ import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; export type SequencerRollupConstants = Pick< L1RollupConstants, 
'ethereumSlotDuration' | 'l1GenesisTime' | 'slotDuration' ->; +> & { + /** Total L2 gas (mana) allowed per checkpoint. Fetched from L1 getManaLimit(). */ + rollupManaLimit: number; +}; diff --git a/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts b/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts index 60cc606570f8..15be31df26c4 100644 --- a/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts +++ b/yarn-project/sequencer-client/src/test/mock_checkpoint_builder.ts @@ -1,5 +1,6 @@ -import { type BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; +import { type BlockNumber, CheckpointNumber, IndexWithinCheckpoint } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; +import { unfreeze } from '@aztec/foundation/types'; import { L2Block } from '@aztec/stdlib/block'; import { Checkpoint } from '@aztec/stdlib/checkpoint'; import { Gas } from '@aztec/stdlib/gas'; @@ -86,8 +87,10 @@ export class MockCheckpointBuilder implements ICheckpointBlockBuilder { let usedTxs: Tx[]; if (this.blockProvider) { - // Dynamic mode: get block from provider - block = this.blockProvider(); + // Dynamic mode: get block from provider, cloning to avoid shared references across multiple buildBlock calls + block = L2Block.fromBuffer(this.blockProvider().toBuffer()); + block.header.globalVariables.blockNumber = blockNumber; + await block.header.recomputeHash(); usedTxs = []; this.builtBlocks.push(block); } else { @@ -125,69 +128,69 @@ export class MockCheckpointBuilder implements ICheckpointBlockBuilder { completeCheckpoint(): Promise { this.completeCheckpointCalled = true; const allBlocks = this.blockProvider ? 
this.builtBlocks : this.blocks; - const lastBlock = allBlocks[allBlocks.length - 1]; - // Create a CheckpointHeader from the last block's header for testing - const checkpointHeader = this.createCheckpointHeader(lastBlock); - return Promise.resolve( - new Checkpoint( - makeAppendOnlyTreeSnapshot(lastBlock.header.globalVariables.blockNumber + 1), - checkpointHeader, - allBlocks, - this.checkpointNumber, - ), - ); + return this.buildCheckpoint(allBlocks); } getCheckpoint(): Promise { this.getCheckpointCalled = true; const builtBlocks = this.blockProvider ? this.builtBlocks : this.blocks.slice(0, this.blockIndex); - const lastBlock = builtBlocks[builtBlocks.length - 1]; - if (!lastBlock) { + if (builtBlocks.length === 0) { throw new Error('No blocks built yet'); } - // Create a CheckpointHeader from the last block's header for testing - const checkpointHeader = this.createCheckpointHeader(lastBlock); - return Promise.resolve( - new Checkpoint( - makeAppendOnlyTreeSnapshot(lastBlock.header.globalVariables.blockNumber + 1), - checkpointHeader, - builtBlocks, - this.checkpointNumber, - ), - ); + return this.buildCheckpoint(builtBlocks); } - /** - * Creates a CheckpointHeader from a block's header for testing. - * This is a simplified version that creates a minimal CheckpointHeader. - */ - private createCheckpointHeader(block: L2Block): CheckpointHeader { - const header = block.header; - const gv = header.globalVariables; - return CheckpointHeader.empty({ - lastArchiveRoot: header.lastArchive.root, - blockHeadersHash: Fr.random(), // Use random for testing + /** Builds a structurally valid Checkpoint from a list of blocks, fixing up indexes and archive chaining. */ + private async buildCheckpoint(blocks: L2Block[]): Promise { + // Fix up indexWithinCheckpoint and archive chaining so the checkpoint passes structural validation. 
+ for (let i = 0; i < blocks.length; i++) { + blocks[i].indexWithinCheckpoint = IndexWithinCheckpoint(i); + if (i > 0) { + unfreeze(blocks[i].header).lastArchive = blocks[i - 1].archive; + await blocks[i].header.recomputeHash(); + } + } + + const firstBlock = blocks[0]; + const lastBlock = blocks[blocks.length - 1]; + const gv = firstBlock.header.globalVariables; + + const checkpointHeader = CheckpointHeader.empty({ + lastArchiveRoot: firstBlock.header.lastArchive.root, + blockHeadersHash: Fr.random(), slotNumber: gv.slotNumber, timestamp: gv.timestamp, coinbase: gv.coinbase, feeRecipient: gv.feeRecipient, gasFees: gv.gasFees, - totalManaUsed: header.totalManaUsed, + totalManaUsed: lastBlock.header.totalManaUsed, }); + + return new Checkpoint( + makeAppendOnlyTreeSnapshot(lastBlock.header.globalVariables.blockNumber + 1), + checkpointHeader, + blocks, + this.checkpointNumber, + ); } - /** Reset for reuse in another test */ - reset(): void { - this.blocks = []; + /** Resets per-checkpoint state (built blocks, consumed txs) while preserving config (blockProvider, seeded blocks). 
*/ + resetCheckpointState(): void { this.builtBlocks = []; - this.usedTxsPerBlock = []; this.blockIndex = 0; - this.buildBlockCalls = []; this.consumedTxHashes.clear(); this.completeCheckpointCalled = false; this.getCheckpointCalled = false; + } + + /** Reset for reuse in another test */ + reset(): void { + this.blocks = []; + this.usedTxsPerBlock = []; + this.buildBlockCalls = []; this.errorOnBuild = undefined; this.blockProvider = undefined; + this.resetCheckpointState(); } } @@ -275,6 +278,8 @@ export class MockCheckpointsBuilder implements ICheckpointsBuilder { if (!this.checkpointBuilder) { // Auto-create a builder if none was set this.checkpointBuilder = new MockCheckpointBuilder(constants, checkpointNumber); + } else { + this.checkpointBuilder.resetCheckpointState(); } return Promise.resolve(this.checkpointBuilder); diff --git a/yarn-project/stdlib/src/block/l2_block.ts b/yarn-project/stdlib/src/block/l2_block.ts index 15f037082a91..739988cda8f7 100644 --- a/yarn-project/stdlib/src/block/l2_block.ts +++ b/yarn-project/stdlib/src/block/l2_block.ts @@ -1,4 +1,5 @@ import { type BlockBlobData, encodeBlockBlobData } from '@aztec/blob-lib/encoding'; +import { DA_GAS_PER_FIELD } from '@aztec/constants'; import { BlockNumber, CheckpointNumber, @@ -175,7 +176,7 @@ export class L2Block { } & Partial[0]> = {}, ): Promise { const archive = new AppendOnlyTreeSnapshot(Fr.random(), blockNumber + 1); - const header = BlockHeader.random({ blockNumber, ...blockHeaderOverrides }); + const header = BlockHeader.random({ ...blockHeaderOverrides, blockNumber }); const body = await Body.random({ txsPerBlock, makeTxOptions, ...txOptions }); return new L2Block(archive, header, body, checkpointNumber, indexWithinCheckpoint); } @@ -221,4 +222,13 @@ export class L2Block { timestamp: this.header.globalVariables.timestamp, }; } + + /** + * Computes the total DA gas used by this block. 
+ * This may change in the future, but we cannot access the actual DA gas used in a block since it's not exposed + * in the L2BlockHeader, so we have to rely on recomputing it. + */ + computeDAGasUsed(): number { + return this.body.txEffects.reduce((total, txEffect) => total + txEffect.getNumBlobFields(), 0) * DA_GAS_PER_FIELD; + } } diff --git a/yarn-project/stdlib/src/checkpoint/checkpoint.ts b/yarn-project/stdlib/src/checkpoint/checkpoint.ts index 2c95d3c0be4a..6f1159533cd1 100644 --- a/yarn-project/stdlib/src/checkpoint/checkpoint.ts +++ b/yarn-project/stdlib/src/checkpoint/checkpoint.ts @@ -6,7 +6,7 @@ import { IndexWithinCheckpoint, SlotNumber, } from '@aztec/foundation/branded-types'; -import { sum } from '@aztec/foundation/collection'; +import { pick, sum } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { BufferReader, serializeSignedBigInt, serializeToBuffer } from '@aztec/foundation/serialize'; import type { FieldsOf } from '@aztec/foundation/types'; @@ -152,10 +152,12 @@ export class Checkpoint { startBlockNumber?: number; previousArchive?: AppendOnlyTreeSnapshot; feeAssetPriceModifier?: bigint; + archive?: AppendOnlyTreeSnapshot; } & Partial[0]> & Partial[1]> = {}, ) { - const header = CheckpointHeader.random(options); + const headerOptions = previousArchive ? { lastArchiveRoot: previousArchive.root, ...options } : options; + const header = CheckpointHeader.random(headerOptions); // Create blocks sequentially to chain archive roots properly. // Each block's header.lastArchive must equal the previous block's archive. @@ -166,11 +168,18 @@ export class Checkpoint { indexWithinCheckpoint: IndexWithinCheckpoint(i), ...options, ...(lastArchive ? 
{ lastArchive } : {}), + ...pick(header, 'slotNumber', 'timestamp', 'coinbase', 'feeRecipient', 'gasFees'), }); lastArchive = block.archive; blocks.push(block); } - return new Checkpoint(AppendOnlyTreeSnapshot.random(), header, blocks, checkpointNumber, feeAssetPriceModifier); + return new Checkpoint( + options.archive ?? AppendOnlyTreeSnapshot.random(), + header, + blocks, + checkpointNumber, + feeAssetPriceModifier, + ); } } diff --git a/yarn-project/stdlib/src/checkpoint/index.ts b/yarn-project/stdlib/src/checkpoint/index.ts index d86f88c87bbb..96c176e1d861 100644 --- a/yarn-project/stdlib/src/checkpoint/index.ts +++ b/yarn-project/stdlib/src/checkpoint/index.ts @@ -2,3 +2,4 @@ export * from './checkpoint.js'; export * from './checkpoint_data.js'; export * from './checkpoint_info.js'; export * from './published_checkpoint.js'; +export * from './validate.js'; diff --git a/yarn-project/stdlib/src/checkpoint/validate.test.ts b/yarn-project/stdlib/src/checkpoint/validate.test.ts new file mode 100644 index 000000000000..6dfa314dd0c3 --- /dev/null +++ b/yarn-project/stdlib/src/checkpoint/validate.test.ts @@ -0,0 +1,233 @@ +import { BLOBS_PER_CHECKPOINT, FIELDS_PER_BLOB, MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT } from '@aztec/constants'; +import { BlockNumber, CheckpointNumber, IndexWithinCheckpoint, SlotNumber } from '@aztec/foundation/branded-types'; +import { Fr } from '@aztec/foundation/curves/bn254'; +import { EthAddress } from '@aztec/foundation/eth-address'; + +import { jest } from '@jest/globals'; + +import { AztecAddress } from '../aztec-address/index.js'; +import { GasFees } from '../gas/index.js'; +import { AppendOnlyTreeSnapshot } from '../trees/append_only_tree_snapshot.js'; +import { BlockHeader } from '../tx/block_header.js'; +import { Checkpoint } from './checkpoint.js'; +import { CheckpointValidationError, validateCheckpoint, validateCheckpointStructure } from './validate.js'; + +describe('validateCheckpointStructure', () => { + const checkpointNumber = 
CheckpointNumber(1); + + const fixedSlot = SlotNumber(42); + const fixedCoinbase = EthAddress.random(); + const fixedFeeRecipient = AztecAddress.fromField(Fr.random()); + const fixedGasFees = GasFees.random(); + const fixedTimestamp = BigInt(Math.floor(Date.now() / 1000)); + + /** Builds a valid random checkpoint with the given number of blocks. All blocks share the same slot, + * coinbase, feeRecipient, gasFees, and timestamp, and the checkpoint header's lastArchiveRoot is + * aligned with the first block. */ + async function makeValidCheckpoint(numBlocks = 2): Promise { + const checkpoint = await Checkpoint.random(checkpointNumber, { + numBlocks, + startBlockNumber: 1, + slotNumber: fixedSlot, + coinbase: fixedCoinbase, + feeRecipient: fixedFeeRecipient, + gasFees: fixedGasFees, + timestamp: fixedTimestamp, + }); + // Align checkpoint header's lastArchiveRoot with the first block. + checkpoint.header.lastArchiveRoot = checkpoint.blocks[0].header.lastArchive.root; + return checkpoint; + } + + it('passes on a valid single-block checkpoint', async () => { + const checkpoint = await makeValidCheckpoint(1); + expect(() => validateCheckpointStructure(checkpoint)).not.toThrow(); + }); + + it('passes on a valid multi-block checkpoint', async () => { + const checkpoint = await makeValidCheckpoint(3); + expect(() => validateCheckpointStructure(checkpoint)).not.toThrow(); + }); + + it('throws when checkpoint slot does not match first block slot', async () => { + const checkpoint = await makeValidCheckpoint(1); + checkpoint.header.slotNumber = SlotNumber(checkpoint.blocks[0].slot + 1); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/all blocks must share the same slot/); + }); + + it('throws when checkpoint lastArchiveRoot does not match first block lastArchive root', async () => { + const checkpoint = await makeValidCheckpoint(1); + checkpoint.header.lastArchiveRoot 
= AppendOnlyTreeSnapshot.random().root; + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/lastArchiveRoot does not match first block/); + }); + + it('throws on empty block list', async () => { + const checkpoint = await makeValidCheckpoint(1); + checkpoint.blocks = []; + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow('Checkpoint has no blocks'); + }); + + it('throws when block count exceeds MAX_BLOCKS_PER_CHECKPOINT', async () => { + // Build 73 blocks (MAX_BLOCKS_PER_CHECKPOINT = 72) + const checkpoint = await makeValidCheckpoint(1); + // Reuse the single block to fill up 73 slots (structure checks happen before archive chaining in loop) + const block = checkpoint.blocks[0]; + checkpoint.blocks = Array.from({ length: 73 }, (_, i) => { + const cloned = Object.create(Object.getPrototypeOf(block), Object.getOwnPropertyDescriptors(block)); + cloned.indexWithinCheckpoint = IndexWithinCheckpoint(i); + return cloned; + }); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/exceeding limit of 72/); + }); + + it('throws when indexWithinCheckpoint is wrong', async () => { + const checkpoint = await makeValidCheckpoint(2); + // Swap the indices + const block0 = checkpoint.blocks[0]; + block0.indexWithinCheckpoint = IndexWithinCheckpoint(1); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/indexWithinCheckpoint/); + }); + + it('throws when block numbers are not sequential', async () => { + const checkpoint = await makeValidCheckpoint(2); + // Manually set block[1] to a non-sequential number (block[0].number + 2) + const block1 = checkpoint.blocks[1]; + // 
Override block number via header globalVariables + const gv = block1.header.globalVariables; + gv.blockNumber = BlockNumber(gv.blockNumber + 2); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/not sequential/); + }); + + it('throws when archive roots are not chained', async () => { + const checkpoint = await makeValidCheckpoint(2); + // Break chaining: replace block[1]'s header with a new one that has a random lastArchive + const block1 = checkpoint.blocks[1]; + block1.header = BlockHeader.from({ ...block1.header, lastArchive: AppendOnlyTreeSnapshot.random() }); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/lastArchive root does not match/); + }); + + it('throws when blocks have different slot numbers', async () => { + const checkpoint = await makeValidCheckpoint(2); + // Change block[1]'s slot to something different + const block1 = checkpoint.blocks[1]; + block1.header.globalVariables.slotNumber = SlotNumber(block1.slot + 1); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/all blocks must share the same slot/); + }); + + it('throws when a block global variables do not match checkpoint header', async () => { + const checkpoint = await makeValidCheckpoint(2); + // Mutate coinbase on block[1] to something different from the checkpoint header + checkpoint.blocks[1].header.globalVariables.coinbase = EthAddress.random(); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(CheckpointValidationError); + expect(() => validateCheckpointStructure(checkpoint)).toThrow(/global variables.*do not match checkpoint header/); + }); +}); + +describe('validateCheckpoint — limits', () => { + const checkpointNumber = CheckpointNumber(1); + const 
fixedSlot = SlotNumber(42); + const fixedCoinbase = EthAddress.random(); + const fixedFeeRecipient = AztecAddress.fromField(Fr.random()); + const fixedGasFees = GasFees.random(); + const fixedTimestamp = BigInt(Math.floor(Date.now() / 1000)); + + /** A known mana value injected into every block, making assertions deterministic. */ + const specificMana = 1_000_000; + + /** Opts that leave all limits wide open so structural validity is tested in isolation. */ + const validOpts = { + rollupManaLimit: Number.MAX_SAFE_INTEGER, + maxL2BlockGas: undefined as number | undefined, + maxDABlockGas: undefined as number | undefined, + }; + + /** Builds a structurally valid single-block checkpoint with a known mana value. */ + async function makeCheckpoint(): Promise { + const checkpoint = await Checkpoint.random(checkpointNumber, { + numBlocks: 1, + startBlockNumber: 1, + slotNumber: fixedSlot, + coinbase: fixedCoinbase, + feeRecipient: fixedFeeRecipient, + gasFees: fixedGasFees, + timestamp: fixedTimestamp, + totalManaUsed: new Fr(specificMana), + }); + checkpoint.header.lastArchiveRoot = checkpoint.blocks[0].header.lastArchive.root; + return checkpoint; + } + + it('passes when all limits are within bounds', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, validOpts)).not.toThrow(); + }); + + it('throws when checkpoint mana exceeds rollupManaLimit', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, rollupManaLimit: specificMana - 1 })).toThrow( + CheckpointValidationError, + ); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, rollupManaLimit: specificMana - 1 })).toThrow( + /mana cost.*exceeds rollup limit/, + ); + }); + + it('passes when checkpoint mana equals rollupManaLimit', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, rollupManaLimit: specificMana 
})).not.toThrow(); + }); + + it('throws when checkpoint DA gas exceeds MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT', async () => { + const checkpoint = await makeCheckpoint(); + jest.spyOn(checkpoint.blocks[0], 'computeDAGasUsed').mockReturnValue(MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT + 1); + expect(() => validateCheckpoint(checkpoint, validOpts)).toThrow(CheckpointValidationError); + expect(() => validateCheckpoint(checkpoint, validOpts)).toThrow(/DA gas cost.*exceeds limit/); + }); + + it('throws when checkpoint blob field count exceeds limit', async () => { + const checkpoint = await makeCheckpoint(); + const maxBlobFields = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB; + jest.spyOn(checkpoint, 'toBlobFields').mockReturnValue(new Array(maxBlobFields + 1).fill(Fr.ZERO)); + expect(() => validateCheckpoint(checkpoint, validOpts)).toThrow(CheckpointValidationError); + expect(() => validateCheckpoint(checkpoint, validOpts)).toThrow(/blob field count.*exceeds limit/); + }); + + it('throws when a block L2 gas exceeds maxL2BlockGas', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxL2BlockGas: specificMana - 1 })).toThrow( + CheckpointValidationError, + ); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxL2BlockGas: specificMana - 1 })).toThrow( + /L2 gas used.*exceeding limit/, + ); + }); + + it('skips per-block L2 gas check when maxL2BlockGas is undefined', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxL2BlockGas: undefined })).not.toThrow(); + }); + + it('throws when a block DA gas exceeds maxDABlockGas', async () => { + const checkpoint = await makeCheckpoint(); + jest.spyOn(checkpoint.blocks[0], 'computeDAGasUsed').mockReturnValue(1000); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxDABlockGas: 999 })).toThrow( + CheckpointValidationError, + ); + expect(() => validateCheckpoint(checkpoint, { 
...validOpts, maxDABlockGas: 999 })).toThrow( + /DA gas used.*exceeding limit/, + ); + }); + + it('skips per-block DA gas check when maxDABlockGas is undefined', async () => { + const checkpoint = await makeCheckpoint(); + expect(() => validateCheckpoint(checkpoint, { ...validOpts, maxDABlockGas: undefined })).not.toThrow(); + }); +}); diff --git a/yarn-project/stdlib/src/checkpoint/validate.ts b/yarn-project/stdlib/src/checkpoint/validate.ts new file mode 100644 index 000000000000..1ceb9fa4c102 --- /dev/null +++ b/yarn-project/stdlib/src/checkpoint/validate.ts @@ -0,0 +1,230 @@ +import { BLOBS_PER_CHECKPOINT, FIELDS_PER_BLOB, MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT } from '@aztec/constants'; +import type { CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; +import { sum } from '@aztec/foundation/collection'; + +import { MAX_BLOCKS_PER_CHECKPOINT } from '../deserialization/index.js'; +import type { Checkpoint } from './checkpoint.js'; + +export class CheckpointValidationError extends Error { + constructor( + message: string, + public readonly checkpointNumber: CheckpointNumber, + public readonly slot: SlotNumber, + ) { + super(message); + this.name = 'CheckpointValidationError'; + } +} + +/** + * Validates a checkpoint. Throws a CheckpointValidationError if any validation fails. 
+ * - Validates structural integrity (non-empty, block count, sequential numbers, archive chaining, slot consistency) + * - Validates checkpoint blob field count against maxBlobFields limit + * - Validates total L2 gas used by checkpoint blocks against the Rollup contract mana limit + * - Validates total DA gas used by checkpoint blocks against MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT + * - Validates individual block L2 gas and DA gas against maxL2BlockGas and maxDABlockGas limits + */ +export function validateCheckpoint( + checkpoint: Checkpoint, + opts: { + rollupManaLimit?: number; + maxL2BlockGas?: number; + maxDABlockGas?: number; + maxTxsPerCheckpoint?: number; + maxTxsPerBlock?: number; + }, +): void { + validateCheckpointStructure(checkpoint); + validateCheckpointLimits(checkpoint, opts); + validateCheckpointBlocksGasLimits(checkpoint, opts); +} + +/** + * Validates structural integrity of a checkpoint. + * - Non-empty block list + * - Block count within MAX_BLOCKS_PER_CHECKPOINT + * - Checkpoint slot matches the first block's slot + * - Checkpoint lastArchiveRoot matches the first block's lastArchive root + * - Sequential block numbers without gaps + * - Sequential indexWithinCheckpoint starting at 0 + * - Archive root chaining between consecutive blocks + * - Consistent slot number across all blocks + * - Global variables (slot, timestamp, coinbase, feeRecipient, gasFees) match checkpoint header for each block + */ +export function validateCheckpointStructure(checkpoint: Checkpoint): void { + const { blocks, number, slot } = checkpoint; + + if (blocks.length === 0) { + throw new CheckpointValidationError('Checkpoint has no blocks', number, slot); + } + + if (blocks.length > MAX_BLOCKS_PER_CHECKPOINT) { + throw new CheckpointValidationError( + `Checkpoint has ${blocks.length} blocks, exceeding limit of ${MAX_BLOCKS_PER_CHECKPOINT}`, + number, + slot, + ); + } + + const firstBlock = blocks[0]; + + if 
(!checkpoint.header.lastArchiveRoot.equals(firstBlock.header.lastArchive.root)) { + throw new CheckpointValidationError( + `Checkpoint lastArchiveRoot does not match first block's lastArchive root`, + number, + slot, + ); + } + + for (let i = 0; i < blocks.length; i++) { + const block = blocks[i]; + + if (block.indexWithinCheckpoint !== i) { + throw new CheckpointValidationError( + `Block at index ${i} has indexWithinCheckpoint ${block.indexWithinCheckpoint}, expected ${i}`, + number, + slot, + ); + } + + if (block.slot !== slot) { + throw new CheckpointValidationError( + `Block ${block.number} has slot ${block.slot}, expected ${slot} (all blocks must share the same slot)`, + number, + slot, + ); + } + + if (!checkpoint.header.matchesGlobalVariables(block.header.globalVariables)) { + throw new CheckpointValidationError( + `Block ${block.number} global variables (slot, timestamp, coinbase, feeRecipient, gasFees) do not match checkpoint header`, + number, + slot, + ); + } + + if (i > 0) { + const prev = blocks[i - 1]; + if (block.number !== prev.number + 1) { + throw new CheckpointValidationError( + `Block numbers are not sequential: block at index ${i - 1} has number ${prev.number}, block at index ${i} has number ${block.number}`, + number, + slot, + ); + } + + if (!block.header.lastArchive.root.equals(prev.archive.root)) { + throw new CheckpointValidationError( + `Block ${block.number} lastArchive root does not match archive root of block ${prev.number}`, + number, + slot, + ); + } + } + } +} + +/** Validates checkpoint blocks gas limits */ +function validateCheckpointBlocksGasLimits( + checkpoint: Checkpoint, + opts: { + maxL2BlockGas?: number; + maxDABlockGas?: number; + maxTxsPerBlock?: number; + }, +): void { + const { maxL2BlockGas, maxDABlockGas, maxTxsPerBlock } = opts; + + if (maxL2BlockGas !== undefined) { + for (const block of checkpoint.blocks) { + const blockL2Gas = block.header.totalManaUsed.toNumber(); + if (blockL2Gas > maxL2BlockGas) { + throw new 
CheckpointValidationError( + `Block ${block.number} in checkpoint has L2 gas used ${blockL2Gas} exceeding limit of ${maxL2BlockGas}`, + checkpoint.number, + checkpoint.slot, + ); + } + } + } + + if (maxDABlockGas !== undefined) { + for (const block of checkpoint.blocks) { + const blockDAGas = block.computeDAGasUsed(); + if (blockDAGas > maxDABlockGas) { + throw new CheckpointValidationError( + `Block ${block.number} in checkpoint has DA gas used ${blockDAGas} exceeding limit of ${maxDABlockGas}`, + checkpoint.number, + checkpoint.slot, + ); + } + } + } + + if (maxTxsPerBlock !== undefined) { + for (const block of checkpoint.blocks) { + const blockTxCount = block.body.txEffects.length; + if (blockTxCount > maxTxsPerBlock) { + throw new CheckpointValidationError( + `Block ${block.number} in checkpoint has ${blockTxCount} txs exceeding limit of ${maxTxsPerBlock}`, + checkpoint.number, + checkpoint.slot, + ); + } + } + } +} + +/** Validates checkpoint max blob fields, gas limits, and tx limits */ +function validateCheckpointLimits( + checkpoint: Checkpoint, + opts: { + rollupManaLimit?: number; + maxTxsPerCheckpoint?: number; + }, +): void { + const { rollupManaLimit, maxTxsPerCheckpoint } = opts; + + const maxBlobFields = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB; + const maxDAGas = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT; + + if (rollupManaLimit !== undefined) { + const checkpointMana = sum(checkpoint.blocks.map(block => block.header.totalManaUsed.toNumber())); + if (checkpointMana > rollupManaLimit) { + throw new CheckpointValidationError( + `Checkpoint mana cost ${checkpointMana} exceeds rollup limit of ${rollupManaLimit}`, + checkpoint.number, + checkpoint.slot, + ); + } + } + + const checkpointDAGas = sum(checkpoint.blocks.map(block => block.computeDAGasUsed())); + if (checkpointDAGas > maxDAGas) { + throw new CheckpointValidationError( + `Checkpoint DA gas cost ${checkpointDAGas} exceeds limit of ${maxDAGas}`, + checkpoint.number, + checkpoint.slot, + ); + } + + 
const checkpointBlobFields = checkpoint.toBlobFields().length; + if (checkpointBlobFields > maxBlobFields) { + throw new CheckpointValidationError( + `Checkpoint blob field count ${checkpointBlobFields} exceeds limit of ${maxBlobFields}`, + checkpoint.number, + checkpoint.slot, + ); + } + + if (maxTxsPerCheckpoint !== undefined) { + const checkpointTxCount = sum(checkpoint.blocks.map(block => block.body.txEffects.length)); + if (checkpointTxCount > maxTxsPerCheckpoint) { + throw new CheckpointValidationError( + `Checkpoint tx count ${checkpointTxCount} exceeds limit of ${maxTxsPerCheckpoint}`, + checkpoint.number, + checkpoint.slot, + ); + } + } +} diff --git a/yarn-project/stdlib/src/config/sequencer-config.ts b/yarn-project/stdlib/src/config/sequencer-config.ts index 31d0eca9458a..77bdfd94ed82 100644 --- a/yarn-project/stdlib/src/config/sequencer-config.ts +++ b/yarn-project/stdlib/src/config/sequencer-config.ts @@ -1,4 +1,4 @@ -import { type ConfigMappingsType, numberConfigHelper } from '@aztec/foundation/config'; +import type { ConfigMappingsType } from '@aztec/foundation/config'; import type { SequencerConfig } from '../interfaces/configs.js'; @@ -32,6 +32,6 @@ export const sharedSequencerConfigMappings: ConfigMappingsType< maxTxsPerBlock: { env: 'SEQ_MAX_TX_PER_BLOCK', description: 'The maximum number of txs to include in a block.', - ...numberConfigHelper(DEFAULT_MAX_TXS_PER_BLOCK), + parseEnv: (val: string) => (val ? 
parseInt(val, 10) : undefined), }, }; diff --git a/yarn-project/stdlib/src/interfaces/block-builder.ts b/yarn-project/stdlib/src/interfaces/block-builder.ts index b5b5ea9a4c1a..5765f09e4001 100644 --- a/yarn-project/stdlib/src/interfaces/block-builder.ts +++ b/yarn-project/stdlib/src/interfaces/block-builder.ts @@ -50,7 +50,19 @@ export interface PublicProcessorValidator { export type FullNodeBlockBuilderConfig = Pick & Pick & - Pick; + Pick< + SequencerConfig, + | 'txPublicSetupAllowList' + | 'fakeProcessingDelayPerTxMs' + | 'fakeThrowAfterProcessingTxCount' + | 'maxTxsPerBlock' + | 'maxTxsPerCheckpoint' + | 'maxL2BlockGas' + | 'maxDABlockGas' + > & { + /** Total L2 gas (mana) allowed per checkpoint. Fetched from L1 getManaLimit(). */ + rollupManaLimit: number; + }; export const FullNodeBlockBuilderConfigKeys: (keyof FullNodeBlockBuilderConfig)[] = [ 'l1GenesisTime', @@ -60,6 +72,11 @@ export const FullNodeBlockBuilderConfigKeys: (keyof FullNodeBlockBuilderConfig)[ 'txPublicSetupAllowList', 'fakeProcessingDelayPerTxMs', 'fakeThrowAfterProcessingTxCount', + 'maxTxsPerBlock', + 'maxTxsPerCheckpoint', + 'maxL2BlockGas', + 'maxDABlockGas', + 'rollupManaLimit', ] as const; /** Thrown when no valid transactions are available to include in a block after processing, and this is not the first block in a checkpoint. */ diff --git a/yarn-project/stdlib/src/interfaces/configs.ts b/yarn-project/stdlib/src/interfaces/configs.ts index 88b1366a6889..8adae5d1fce8 100644 --- a/yarn-project/stdlib/src/interfaces/configs.ts +++ b/yarn-project/stdlib/src/interfaces/configs.ts @@ -13,6 +13,8 @@ export interface SequencerConfig { sequencerPollingIntervalMS?: number; /** The maximum number of txs to include in a block. */ maxTxsPerBlock?: number; + /** The maximum number of txs across all blocks in a checkpoint. */ + maxTxsPerCheckpoint?: number; /** The minimum number of txs to include in a block. 
*/ minTxsPerBlock?: number; /** The minimum number of valid txs (after execution) to include in a block. If not set, falls back to minTxsPerBlock. */ @@ -79,12 +81,15 @@ export interface SequencerConfig { minBlocksForCheckpoint?: number; /** Skip publishing checkpoint proposals probability (for testing checkpoint prunes only) */ skipPublishingCheckpointsPercent?: number; + /** Multiplier for per-block budget computation (default 2). */ + gasPerBlockAllocationMultiplier?: number; } export const SequencerConfigSchema = zodFor()( z.object({ sequencerPollingIntervalMS: z.number().optional(), maxTxsPerBlock: z.number().optional(), + maxTxsPerCheckpoint: z.number().optional(), minValidTxsPerBlock: z.number().optional(), minTxsPerBlock: z.number().optional(), maxL2BlockGas: z.number().optional(), @@ -118,6 +123,7 @@ export const SequencerConfigSchema = zodFor()( skipPushProposedBlocksToArchiver: z.boolean().optional(), minBlocksForCheckpoint: z.number().positive().optional(), skipPublishingCheckpointsPercent: z.number().gte(0).lte(100).optional(), + gasPerBlockAllocationMultiplier: z.number().positive().optional(), }), ); @@ -134,7 +140,12 @@ type SequencerConfigOptionalKeys = | 'l1PublishingTime' | 'txPublicSetupAllowList' | 'minValidTxsPerBlock' - | 'minBlocksForCheckpoint'; + | 'minBlocksForCheckpoint' + | 'maxTxsPerBlock' + | 'maxTxsPerCheckpoint' + | 'maxL2BlockGas' + | 'maxDABlockGas' + | 'gasPerBlockAllocationMultiplier'; export type ResolvedSequencerConfig = Prettify< Required> & Pick diff --git a/yarn-project/stdlib/src/tests/mocks.ts b/yarn-project/stdlib/src/tests/mocks.ts index ceffb21c01a8..6a04b4e8bbb5 100644 --- a/yarn-project/stdlib/src/tests/mocks.ts +++ b/yarn-project/stdlib/src/tests/mocks.ts @@ -425,10 +425,13 @@ export async function mockCheckpointAndMessages( Partial[1]> = {}, ) { const slotNumber = options.slotNumber ?? 
SlotNumber(Number(checkpointNumber) * 10); + const globals = GlobalVariables.random({ slotNumber, ...options }); const blocksAndMessages = []; + // Track the previous block's archive to ensure consecutive blocks have consistent archive roots. // The current block's header.lastArchive must equal the previous block's archive. let lastArchive: AppendOnlyTreeSnapshot | undefined = previousArchive; + // Pass maxEffects via txOptions so it reaches TxEffect.random const txOptions = maxEffects !== undefined ? { maxEffects } : {}; for (let i = 0; i < (blocks?.length ?? numBlocks); i++) { @@ -437,11 +440,11 @@ export async function mockCheckpointAndMessages( block: blocks?.[i] ?? (await L2Block.random(blockNumber, { + ...globals, checkpointNumber, indexWithinCheckpoint: IndexWithinCheckpoint(i), txsPerBlock: numTxsPerBlock, txOptions, - slotNumber, ...options, ...makeBlockOptions(blockNumber), ...(lastArchive ? { lastArchive } : {}), @@ -455,12 +458,18 @@ export async function mockCheckpointAndMessages( const messages = blocksAndMessages[0].messages; const inHash = computeInHashFromL1ToL2Messages(messages); - const checkpoint = await Checkpoint.random(checkpointNumber, { numBlocks: 0, slotNumber, inHash, ...options }); + const firstBlockLastArchive = blocksAndMessages[0].block.header.lastArchive; + const checkpoint = await Checkpoint.random(checkpointNumber, { + numBlocks: 0, + inHash, + ...options, + ...globals, + lastArchive: firstBlockLastArchive, + lastArchiveRoot: firstBlockLastArchive.root, + archive: lastArchive, + }); + checkpoint.blocks = blocksAndMessages.map(({ block }) => block); - // Set the checkpoint's archive to match the last block's archive for proper chaining. - // When the archiver reconstructs checkpoints from L1, it uses the checkpoint's archive root - // from the L1 event to set the last block's archive. Without this, the archive chain breaks. 
- checkpoint.archive = lastArchive!; // Return lastArchive so callers can chain it across multiple checkpoints return { checkpoint, messages, lastArchive }; diff --git a/yarn-project/stdlib/src/tx/block_header.ts b/yarn-project/stdlib/src/tx/block_header.ts index 1a1457e0f96e..6788df00ca98 100644 --- a/yarn-project/stdlib/src/tx/block_header.ts +++ b/yarn-project/stdlib/src/tx/block_header.ts @@ -176,6 +176,12 @@ export class BlockHeader { this._cachedHash = Promise.resolve(new BlockHash(hashed)); } + /** Recomputes the cached hash. Used for testing when header fields are mutated via unfreeze. */ + recomputeHash(): Promise { + this._cachedHash = undefined; + return this.hash(); + } + static random(overrides: Partial> & Partial> = {}): BlockHeader { return BlockHeader.from({ lastArchive: AppendOnlyTreeSnapshot.random(), diff --git a/yarn-project/validator-client/README.md b/yarn-project/validator-client/README.md index bb232bc28184..c6891987fd6f 100644 --- a/yarn-project/validator-client/README.md +++ b/yarn-project/validator-client/README.md @@ -223,6 +223,44 @@ This is useful for monitoring network health without participating in consensus. - `createCheckpointProposal(...)` → `CheckpointProposal`: Signs checkpoint proposal - `attestToCheckpointProposal(proposal, attestors)` → `CheckpointAttestation[]`: Creates attestations for given addresses +## Block Building Limits + +L1 enforces gas and blob capacity per checkpoint. The node enforces these during block building to avoid L1 rejection. Three dimensions are metered: L2 gas (mana), DA gas, and blob fields. DA gas maps to blob fields today (`daGas = blobFields * 32`) but both are tracked independently. 
+ +### Checkpoint limits + +| Dimension | Source | Budget | +| --- | --- | --- | +| L2 gas (mana) | `rollup.getManaLimit()` | Dynamic (fetched from L1 at startup) | +| DA gas | `MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT` | 786,432 (6 blobs × 4096 fields × 32 gas/field) | +| Blob fields | `BLOBS_PER_CHECKPOINT × FIELDS_PER_BLOB` | 24,576 minus checkpoint/block-end overhead | + +### Per-block budgets + +Per-block budgets prevent one block from consuming the entire checkpoint budget. + +**Proposer**: `computeBlockLimits()` derives budgets at startup as `min(checkpointLimit, ceil(checkpointLimit / maxBlocks * multiplier))`, where `maxBlocks` comes from the timetable and `multiplier` defaults to 2. A multiplier greater than 1 allows early blocks to use more than their even share of the checkpoint budget, since different blocks hit different limit dimensions (L2 gas, DA gas, blob fields) — a strict even split would waste capacity. Operators can override via `SEQ_MAX_L2_BLOCK_GAS` / `SEQ_MAX_DA_BLOCK_GAS` / `SEQ_MAX_TX_PER_BLOCK` (capped at checkpoint limits). Per-block TX limits follow the same derivation pattern when `SEQ_MAX_TX_PER_CHECKPOINT` is set. + +**Validator**: Does not enforce per-block gas budgets. Only checkpoint-level limits are checked, so that proposers can freely distribute capacity across blocks within a checkpoint. + +**Checkpoint-level capping**: `CheckpointBuilder.capLimitsByCheckpointBudgets()` always runs before tx processing, capping per-block limits by `checkpointBudget - sum(used by prior blocks)` for all three gas dimensions and for transaction count (when `SEQ_MAX_TX_PER_CHECKPOINT` is set). This applies to both proposer and validator paths. + +### Per-transaction enforcement + +**Mempool entry** (`GasLimitsValidator`): L2 gas must be ≤ `MAX_PROCESSABLE_L2_GAS` (6,540,000) and ≥ fixed minimums. + +**Block building** (`PublicProcessor.process`): Before processing, txs are skipped if their estimated blob fields or gas limits would exceed the block budget.
After processing, actual values are checked and the tx is reverted if limits are exceeded. + +### Gas limit configuration + +| Variable | Default | Description | +| --- | --- | --- | +| `SEQ_MAX_L2_BLOCK_GAS` | *auto* | Per-block L2 gas. Auto-derived from `rollupManaLimit / maxBlocks * multiplier`. | +| `SEQ_MAX_DA_BLOCK_GAS` | *auto* | Per-block DA gas. Auto-derived from checkpoint DA limit / maxBlocks * multiplier. | +| `SEQ_MAX_TX_PER_BLOCK` | *none* | Per-block tx count. If `SEQ_MAX_TX_PER_CHECKPOINT` is set and per-block is not, derived as `ceil(checkpointLimit / maxBlocks * multiplier)`. | +| `SEQ_MAX_TX_PER_CHECKPOINT` | *none* | Total txs across all blocks in a checkpoint. When set, per-block tx limit is derived from it (unless explicitly overridden) and checkpoint-level capping is enforced. | +| `SEQ_GAS_PER_BLOCK_ALLOCATION_MULTIPLIER` | 2 | Multiplier for per-block budget computation. | + ## Testing Patterns ### Common Mocks diff --git a/yarn-project/validator-client/src/checkpoint_builder.test.ts b/yarn-project/validator-client/src/checkpoint_builder.test.ts index 38945d92aa4e..baaf96a8cbdd 100644 --- a/yarn-project/validator-client/src/checkpoint_builder.test.ts +++ b/yarn-project/validator-client/src/checkpoint_builder.test.ts @@ -1,3 +1,5 @@ +import { NUM_CHECKPOINT_END_MARKER_FIELDS, getNumBlockEndBlobFields } from '@aztec/blob-lib/encoding'; +import { BLOBS_PER_CHECKPOINT, DA_GAS_PER_FIELD, FIELDS_PER_BLOB, MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT } from '@aztec/constants'; import { BlockNumber, CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; import { EthAddress } from '@aztec/foundation/eth-address'; @@ -12,6 +14,7 @@ import { type FullNodeBlockBuilderConfig, type MerkleTreeWriteOperations, NoValidTxsError, + type PublicProcessorLimits, type PublicProcessorValidator, } from '@aztec/stdlib/interfaces/server'; import type { CheckpointGlobalVariables, GlobalVariables, 
ProcessedTx, Tx } from '@aztec/stdlib/tx'; @@ -51,6 +54,48 @@ describe('CheckpointBuilder', () => { public override makeBlockBuilderDeps(_globalVariables: GlobalVariables, _fork: MerkleTreeWriteOperations) { return Promise.resolve({ processor, validator }); } + + public testCapLimits(opts: PublicProcessorLimits) { + return this.capLimitsByCheckpointBudgets(opts); + } + } + + function setupBuilder(overrides: Partial = {}) { + config = { + l1GenesisTime: 0n, + slotDuration: 24, + l1ChainId: 1, + rollupVersion: 1, + rollupManaLimit: 200_000_000, + ...overrides, + }; + checkpointBuilder = new TestCheckpointBuilder( + lightweightCheckpointBuilder as unknown as LightweightCheckpointBuilder, + fork, + config, + contractDataSource, + dateProvider, + telemetryClient, + ); + } + + function createMockBlock(opts: { + manaUsed: number; + txBlobFields: number[]; + blockBlobFieldCount: number; + }): L2Block { + const { manaUsed, txBlobFields, blockBlobFieldCount } = opts; + const block = { + header: { + totalManaUsed: { toNumber: () => manaUsed }, + }, + body: { + txEffects: txBlobFields.map(() => ({})), + }, + computeDAGasUsed: () => txBlobFields.reduce((acc, f) => acc + f * DA_GAS_PER_FIELD, 0), + toBlobFields: () => new Array(blockBlobFieldCount).fill(Fr.ZERO), + } as unknown as L2Block; + return block; } beforeEach(() => { @@ -62,6 +107,7 @@ describe('CheckpointBuilder', () => { slotDuration: 24, l1ChainId: 1, rollupVersion: 1, + rollupManaLimit: 200_000_000, }; contractDataSource = mock(); dateProvider = new TestDateProvider(); @@ -147,4 +193,243 @@ describe('CheckpointBuilder', () => { expect(lightweightCheckpointBuilder.addBlock).not.toHaveBeenCalled(); }); }); + + describe('capLimitsByCheckpointBudgets', () => { + const totalBlobCapacity = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB - NUM_CHECKPOINT_END_MARKER_FIELDS; + const firstBlockEndOverhead = getNumBlockEndBlobFields(true); + const nonFirstBlockEndOverhead = getNumBlockEndBlobFields(false); + + it('caps L2 gas by 
remaining checkpoint mana', () => { + const rollupManaLimit = 1_000_000; + const priorManaUsed = 600_000; + setupBuilder({ rollupManaLimit }); + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: priorManaUsed, txBlobFields: [10], blockBlobFieldCount: 20 }), + ]); + + const opts: PublicProcessorLimits = { maxBlockGas: new Gas(Infinity, 800_000) }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining mana = 1_000_000 - 600_000 = 400_000. Per-block = 800_000. Capped to 400_000. + expect(capped.maxBlockGas!.l2Gas).toBe(400_000); + }); + + it('uses per-block L2 gas limit when tighter than remaining mana', () => { + const rollupManaLimit = 1_000_000; + const priorManaUsed = 200_000; + setupBuilder({ rollupManaLimit }); + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: priorManaUsed, txBlobFields: [10], blockBlobFieldCount: 20 }), + ]); + + const opts: PublicProcessorLimits = { maxBlockGas: new Gas(Infinity, 500_000) }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining mana = 800_000. Per-block = 500_000. Uses 500_000. 
+ expect(capped.maxBlockGas!.l2Gas).toBe(500_000); + }); + + it('uses per-block L2 gas limit when remaining mana is larger', () => { + setupBuilder(); // rollupManaLimit defaults to 200_000_000 + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 100_000, txBlobFields: [10], blockBlobFieldCount: 20 }), + ]); + + const opts: PublicProcessorLimits = { maxBlockGas: new Gas(Infinity, 500_000) }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining mana = 200_000_000 - 100_000 >> 500_000, so per-block limit is used + expect(capped.maxBlockGas!.l2Gas).toBe(500_000); + }); + + it('caps DA gas by remaining checkpoint DA gas budget', () => { + // Each prior tx blob field = DA_GAS_PER_FIELD DA gas + const txBlobFields = [1000]; // 1000 fields * 32 = 32000 DA gas + const priorDAGas = 1000 * DA_GAS_PER_FIELD; + setupBuilder(); + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 0, txBlobFields, blockBlobFieldCount: 1010 }), + ]); + + const perBlockDAGas = 500_000; + const opts: PublicProcessorLimits = { maxBlockGas: new Gas(perBlockDAGas, Infinity) }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining DA gas = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT - priorDAGas + const expectedRemainingDAGas = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT - priorDAGas; + expect(capped.maxBlockGas!.daGas).toBe(Math.min(perBlockDAGas, expectedRemainingDAGas)); + }); + + it('sets maxBlockGas from remaining budgets when caller does not provide it', () => { + const rollupManaLimit = 1_000_000; + const priorManaUsed = 600_000; + setupBuilder({ rollupManaLimit }); + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: priorManaUsed, txBlobFields: [100], blockBlobFieldCount: 110 }), + ]); + + const opts: PublicProcessorLimits = {}; + const capped = (checkpointBuilder as 
TestCheckpointBuilder).testCapLimits(opts); + + expect(capped.maxBlockGas!.l2Gas).toBe(400_000); + expect(capped.maxBlockGas!.daGas).toBe(MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT - 100 * DA_GAS_PER_FIELD); + }); + + it('caps blob fields by remaining checkpoint blob capacity', () => { + const blockBlobFieldCount = 100; // Prior block used 100 blob fields + setupBuilder(); + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 0, txBlobFields: [], blockBlobFieldCount }), + ]); + + const opts: PublicProcessorLimits = { maxBlobFields: 99999 }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Second block: remaining = totalBlobCapacity - 100, minus non-first block end overhead + const expectedMaxBlobFields = totalBlobCapacity - blockBlobFieldCount - nonFirstBlockEndOverhead; + expect(capped.maxBlobFields).toBe(expectedMaxBlobFields); + }); + + it('sets blob fields from remaining capacity when caller does not set them', () => { + setupBuilder(); + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); + + const opts: PublicProcessorLimits = {}; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // First block: full capacity minus first block end overhead + const expectedMaxBlobFields = totalBlobCapacity - firstBlockEndOverhead; + expect(capped.maxBlobFields).toBe(expectedMaxBlobFields); + }); + + it('accumulates limits across multiple prior blocks', () => { + const rollupManaLimit = 1_000_000; + setupBuilder({ rollupManaLimit }); + + const block1 = createMockBlock({ manaUsed: 300_000, txBlobFields: [200], blockBlobFieldCount: 210 }); + const block2 = createMockBlock({ manaUsed: 200_000, txBlobFields: [150], blockBlobFieldCount: 160 }); + lightweightCheckpointBuilder.getBlocks.mockReturnValue([block1, block2]); + + const opts: PublicProcessorLimits = { maxBlockGas: new Gas(Infinity, Infinity) }; + const capped = (checkpointBuilder as 
TestCheckpointBuilder).testCapLimits(opts); + + // Remaining mana = 1_000_000 - 300_000 - 200_000 = 500_000 + expect(capped.maxBlockGas!.l2Gas).toBe(500_000); + + // Remaining DA gas = MAX - (200 + 150) * DA_GAS_PER_FIELD + const expectedRemainingDAGas = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT - (200 + 150) * DA_GAS_PER_FIELD; + expect(capped.maxBlockGas!.daGas).toBe(expectedRemainingDAGas); + + // Remaining blob fields = capacity - 210 - 160 - nonFirstBlockEndOverhead + const expectedBlobFields = totalBlobCapacity - 210 - 160 - nonFirstBlockEndOverhead; + expect(capped.maxBlobFields).toBe(expectedBlobFields); + }); + + it('tracks remaining blob field capacity across multiple blocks', () => { + setupBuilder(); + + const block1BlobFieldCount = 200; + const block2BlobFieldCount = 150; + + // After one block has been built, remaining capacity should account for that block's usage + const block1 = createMockBlock({ manaUsed: 0, txBlobFields: [], blockBlobFieldCount: block1BlobFieldCount }); + lightweightCheckpointBuilder.getBlocks.mockReturnValue([block1]); + + const afterOneBlock = (checkpointBuilder as TestCheckpointBuilder).testCapLimits({}); + + const expectedAfterOneBlock = totalBlobCapacity - block1BlobFieldCount - nonFirstBlockEndOverhead; + expect(afterOneBlock.maxBlobFields).toBe(expectedAfterOneBlock); + + // After two blocks have been built, remaining capacity should further decrease + const block2 = createMockBlock({ manaUsed: 0, txBlobFields: [], blockBlobFieldCount: block2BlobFieldCount }); + lightweightCheckpointBuilder.getBlocks.mockReturnValue([block1, block2]); + + const afterTwoBlocks = (checkpointBuilder as TestCheckpointBuilder).testCapLimits({}); + + const expectedAfterTwoBlocks = + totalBlobCapacity - block1BlobFieldCount - block2BlobFieldCount - nonFirstBlockEndOverhead; + expect(afterTwoBlocks.maxBlobFields).toBe(expectedAfterTwoBlocks); + + // Verify the limit actually decreased between calls + 
expect(afterTwoBlocks.maxBlobFields).toBeLessThan(afterOneBlock.maxBlobFields!); + expect(afterOneBlock.maxBlobFields! - afterTwoBlocks.maxBlobFields!).toBe(block2BlobFieldCount); + }); + + it('caps transaction count by remaining checkpoint tx budget', () => { + setupBuilder({ maxTxsPerCheckpoint: 20 }); + + // Prior block with 3 txs (each with 10 blob fields) + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 0, txBlobFields: [10, 10, 10], blockBlobFieldCount: 40 }), + ]); + + const opts: PublicProcessorLimits = { maxTransactions: 15 }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining txs = 20 - 3 = 17. Per-block = 15. Capped to min(15, 17) = 15. + expect(capped.maxTransactions).toBe(15); + }); + + it('caps transaction count when remaining budget is smaller than per-block limit', () => { + setupBuilder({ maxTxsPerCheckpoint: 10 }); + + // Two prior blocks with 4 txs each = 8 total + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 0, txBlobFields: [10, 10, 10, 10], blockBlobFieldCount: 50 }), + createMockBlock({ manaUsed: 0, txBlobFields: [10, 10, 10, 10], blockBlobFieldCount: 50 }), + ]); + + const opts: PublicProcessorLimits = { maxTransactions: 5 }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining txs = 10 - 8 = 2. Per-block = 5. Capped to min(5, 2) = 2. 
+ expect(capped.maxTransactions).toBe(2); + }); + + it('sets transaction count from remaining budget when caller does not provide it', () => { + setupBuilder({ maxTxsPerCheckpoint: 15 }); + + // Prior block with 5 txs + lightweightCheckpointBuilder.getBlocks.mockReturnValue([ + createMockBlock({ manaUsed: 0, txBlobFields: [10, 10, 10, 10, 10], blockBlobFieldCount: 60 }), + ]); + + const opts: PublicProcessorLimits = {}; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Remaining txs = 15 - 5 = 10 + expect(capped.maxTransactions).toBe(10); + }); + + it('does not cap transaction count when maxTxsPerCheckpoint is not set', () => { + setupBuilder(); // no maxTxsPerCheckpoint + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); + + const opts: PublicProcessorLimits = { maxTransactions: 99 }; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Passthrough: maxTransactions = 99 + expect(capped.maxTransactions).toBe(99); + }); + + it('does not cap transaction count when maxTxsPerCheckpoint is not set and caller does not provide it', () => { + setupBuilder(); // no maxTxsPerCheckpoint + + lightweightCheckpointBuilder.getBlocks.mockReturnValue([]); + + const opts: PublicProcessorLimits = {}; + const capped = (checkpointBuilder as TestCheckpointBuilder).testCapLimits(opts); + + // Neither config nor caller sets it, so it remains undefined + expect(capped.maxTransactions).toBeUndefined(); + }); + }); }); diff --git a/yarn-project/validator-client/src/checkpoint_builder.ts b/yarn-project/validator-client/src/checkpoint_builder.ts index 9d26252c0a23..2e75c6653a61 100644 --- a/yarn-project/validator-client/src/checkpoint_builder.ts +++ b/yarn-project/validator-client/src/checkpoint_builder.ts @@ -1,5 +1,7 @@ +import { NUM_CHECKPOINT_END_MARKER_FIELDS, getNumBlockEndBlobFields } from '@aztec/blob-lib/encoding'; +import { BLOBS_PER_CHECKPOINT, FIELDS_PER_BLOB, 
MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT } from '@aztec/constants'; import { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; -import { merge, pick } from '@aztec/foundation/collection'; +import { merge, pick, sum } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log'; import { bufferToHex } from '@aztec/foundation/string'; @@ -94,8 +96,14 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { }); const { processor, validator } = await this.makeBlockBuilderDeps(globalVariables, this.fork); + // Cap gas limits and available blob fields by remaining checkpoint-level budgets + const cappedOpts: PublicProcessorLimits & { expectedEndState?: StateReference } = { + ...opts, + ...this.capLimitsByCheckpointBudgets(opts), + }; + const [publicProcessorDuration, [processedTxs, failedTxs, usedTxs, _, usedTxBlobFields]] = await elapsed(() => - processor.process(pendingTxs, opts, validator), + processor.process(pendingTxs, cappedOpts, validator), ); // Throw if we didn't collect a single valid tx and we're not allowed to build empty blocks @@ -147,6 +155,61 @@ export class CheckpointBuilder implements ICheckpointBlockBuilder { return this.checkpointBuilder.clone().completeCheckpoint(); } + /** + * Caps per-block gas and blob field limits by remaining checkpoint-level budgets. + * Computes remaining L2 gas (mana), DA gas, and blob fields from blocks already added to the checkpoint, + * then returns opts with maxBlockGas and maxBlobFields capped accordingly. + */ + protected capLimitsByCheckpointBudgets( + opts: PublicProcessorLimits, + ): Pick { + const existingBlocks = this.checkpointBuilder.getBlocks(); + + // Remaining L2 gas (mana) + // IMPORTANT: This assumes mana is computed solely based on L2 gas used in transactions. + // This may change in the future. 
+ const usedMana = sum(existingBlocks.map(b => b.header.totalManaUsed.toNumber())); + const remainingMana = this.config.rollupManaLimit - usedMana; + + // Remaining DA gas + const usedDAGas = sum(existingBlocks.map(b => b.computeDAGasUsed())) ?? 0; + const remainingDAGas = MAX_PROCESSABLE_DA_GAS_PER_CHECKPOINT - usedDAGas; + + // Remaining blob fields (block blob fields include both tx data and block-end overhead) + const usedBlobFields = sum(existingBlocks.map(b => b.toBlobFields().length)); + const totalBlobCapacity = BLOBS_PER_CHECKPOINT * FIELDS_PER_BLOB - NUM_CHECKPOINT_END_MARKER_FIELDS; + const isFirstBlock = existingBlocks.length === 0; + const blockEndOverhead = getNumBlockEndBlobFields(isFirstBlock); + const maxBlobFieldsForTxs = totalBlobCapacity - usedBlobFields - blockEndOverhead; + + // Cap L2 gas by remaining checkpoint mana + const cappedL2Gas = Math.min(opts.maxBlockGas?.l2Gas ?? remainingMana, remainingMana); + + // Cap DA gas by remaining checkpoint DA gas budget + const cappedDAGas = Math.min(opts.maxBlockGas?.daGas ?? remainingDAGas, remainingDAGas); + + // Cap blob fields by remaining checkpoint blob capacity + const cappedBlobFields = + opts.maxBlobFields !== undefined ? Math.min(opts.maxBlobFields, maxBlobFieldsForTxs) : maxBlobFieldsForTxs; + + // Cap transaction count by remaining checkpoint tx budget + let cappedMaxTransactions: number | undefined; + if (this.config.maxTxsPerCheckpoint !== undefined) { + const usedTxs = sum(existingBlocks.map(b => b.body.txEffects.length)); + const remainingTxs = Math.max(0, this.config.maxTxsPerCheckpoint - usedTxs); + cappedMaxTransactions = + opts.maxTransactions !== undefined ? 
Math.min(opts.maxTransactions, remainingTxs) : remainingTxs; + } else { + cappedMaxTransactions = opts.maxTransactions; + } + + return { + maxBlockGas: new Gas(cappedDAGas, cappedL2Gas), + maxBlobFields: cappedBlobFields, + maxTransactions: cappedMaxTransactions, + }; + } + protected async makeBlockBuilderDeps(globalVariables: GlobalVariables, fork: MerkleTreeWriteOperations) { const txPublicSetupAllowList = this.config.txPublicSetupAllowList ?? (await getDefaultAllowedSetupFunctions()); const contractsDB = new PublicContractsDB(this.contractDataSource, this.log.getBindings()); diff --git a/yarn-project/validator-client/src/validator.ha.integration.test.ts b/yarn-project/validator-client/src/validator.ha.integration.test.ts index 5370ba592af5..afdd5722c9da 100644 --- a/yarn-project/validator-client/src/validator.ha.integration.test.ts +++ b/yarn-project/validator-client/src/validator.ha.integration.test.ts @@ -192,6 +192,7 @@ describe('ValidatorClient HA Integration', () => { const metrics = new ValidatorMetrics(getTelemetryClient()); const blockProposalValidator = new BlockProposalValidator(epochCache, { txsPermitted: true, + maxTxsPerBlock: undefined, }); const blockProposalHandler = new BlockProposalHandler( checkpointsBuilder, diff --git a/yarn.lock b/yarn.lock new file mode 100644 index 000000000000..fb57ccd13afb --- /dev/null +++ b/yarn.lock @@ -0,0 +1,4 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + +