Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion yarn-project/sequencer-client/src/sequencer/sequencer.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import type { L2Block } from '@aztec/aztec.js';
import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
import { BLOBS_PER_BLOCK, FIELDS_PER_BLOB, INITIAL_L2_BLOCK_NUM } from '@aztec/constants';
import type { EpochCache } from '@aztec/epoch-cache';
import { FormattedViemError, NoCommitteeError, type RollupContract } from '@aztec/ethereum';
import { omit, pick } from '@aztec/foundation/collection';
Expand Down Expand Up @@ -581,6 +581,7 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter<Sequ
maxTransactions: this.maxTxsPerBlock,
maxBlockSize: this.maxBlockSizeInBytes,
maxBlockGas: this.maxBlockGas,
maxBlobFields: BLOBS_PER_BLOCK * FIELDS_PER_BLOB,
deadline,
};
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -151,6 +151,28 @@ describe('public_processor', () => {
expect(failed).toEqual([]);
});

it('does not exceed max blob fields limit', async function () {
  // Build three private-only transactions with distinct seeds.
  const txs = await Promise.all([1, 2, 3].map(seed => mockPrivateOnlyTx({ seed })));

  // Probe: run a single tx through the processor to measure how many
  // blob fields one of these txs actually produces.
  const [probeResult] = await processor.process([txs[0]]);
  const fieldsPerTx = probeResult[0].txEffect.toBlobFields().length;

  // Budget exactly two txs' worth of blob fields, so the third tx
  // would overflow the limit.
  const maxBlobFields = fieldsPerTx * 2;

  // Run all three txs under the blob-field budget.
  const [processed, failed] = await processor.process(txs, { maxBlobFields });

  // The first two txs fit; the third is skipped (not failed) once the
  // budget is exhausted.
  expect(processed.length).toBe(2);
  expect(processed[0].hash).toEqual(txs[0].getTxHash());
  expect(processed[1].hash).toEqual(txs[1].getTxHash());
  expect(failed).toEqual([]);
});

it('does not send a transaction to the prover if pre validation fails', async function () {
const tx = await mockPrivateOnlyTx();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,7 @@ export class PublicProcessor implements Traceable {
limits: PublicProcessorLimits = {},
validator: PublicProcessorValidator = {},
): Promise<[ProcessedTx[], FailedTx[], Tx[], NestedProcessReturnValues[]]> {
const { maxTransactions, maxBlockSize, deadline, maxBlockGas } = limits;
const { maxTransactions, maxBlockSize, deadline, maxBlockGas, maxBlobFields } = limits;
const { preprocessValidator, nullifierCache } = validator;
const result: ProcessedTx[] = [];
const usedTxs: Tx[] = [];
Expand All @@ -165,6 +165,7 @@ export class PublicProcessor implements Traceable {
let returns: NestedProcessReturnValues[] = [];
let totalPublicGas = new Gas(0, 0);
let totalBlockGas = new Gas(0, 0);
let totalBlobFields = 0;

for await (const origTx of txs) {
// Only process up to the max tx limit
Expand Down Expand Up @@ -252,6 +253,23 @@ export class PublicProcessor implements Traceable {
continue;
}

// If the actual blob fields of this tx would exceed the limit, skip it
const txBlobFields = processedTx.txEffect.toBlobFields().length;
if (maxBlobFields !== undefined && totalBlobFields + txBlobFields > maxBlobFields) {
this.log.debug(
`Skipping processed tx ${txHash} with ${txBlobFields} blob fields due to max blob fields limit.`,
{
txHash,
txBlobFields,
totalBlobFields,
maxBlobFields,
},
);
// Need to revert the checkpoint here and don't go any further
await checkpoint.revert();
continue;
}

// FIXME(fcarreiro): it's ugly to have to notify the validator of nullifiers.
// I'd rather pass the validators the processedTx as well and let them deal with it.
nullifierCache?.addNullifiers(processedTx.txEffect.nullifiers.map(n => n.toBuffer()));
Expand All @@ -262,6 +280,7 @@ export class PublicProcessor implements Traceable {
totalPublicGas = totalPublicGas.add(processedTx.gasUsed.publicGas);
totalBlockGas = totalBlockGas.add(processedTx.gasUsed.totalGas);
totalSizeInBytes += txSize;
totalBlobFields += txBlobFields;
} catch (err: any) {
if (err?.name === 'PublicProcessorTimeoutError') {
this.log.warn(`Stopping tx processing due to timeout.`);
Expand Down
1 change: 1 addition & 0 deletions yarn-project/stdlib/src/interfaces/block-builder.ts
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ export interface PublicProcessorLimits {
maxTransactions?: number;
maxBlockSize?: number;
maxBlockGas?: Gas;
maxBlobFields?: number;
deadline?: Date;
}

Expand Down
Loading