diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_multi_proof.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_multi_proof.test.ts index 15c58af5267e..c7dfed8a34fd 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_multi_proof.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_multi_proof.test.ts @@ -35,6 +35,24 @@ describe('e2e_epochs/epochs_multi_proof', () => { const proverIds = test.proverNodes.map(prover => EthAddress.fromField(prover.getProverId())); logger.info(`Prover nodes running with ids ${proverIds.map(id => id.toString()).join(', ')}`); + // Add a delay to prover nodes so not all txs land on the same place + test.proverNodes.forEach((prover, index) => { + const proverManager = prover.getProver(); + const origCreateEpochProver = proverManager.createEpochProver.bind(proverManager); + proverManager.createEpochProver = () => { + const epochProver = origCreateEpochProver(); + const origFinaliseEpoch = epochProver.finaliseEpoch.bind(epochProver); + epochProver.finaliseEpoch = async () => { + const result = await origFinaliseEpoch(); + const sleepTime = index * 1000 * test.constants.ethereumSlotDuration; + logger.warn(`Delaying finaliseEpoch for prover node ${index} by ${sleepTime}ms`); + await sleep(sleepTime); + return result; + }; + return epochProver; + }; + }); + // Wait until the start of epoch one and collect info on epoch zero await test.waitUntilEpochStarts(1); await sleep(L1_BLOCK_TIME_IN_S * 1000); diff --git a/yarn-project/prover-node/src/prover-node-publisher.test.ts b/yarn-project/prover-node/src/prover-node-publisher.test.ts new file mode 100644 index 000000000000..c47bb587efd3 --- /dev/null +++ b/yarn-project/prover-node/src/prover-node-publisher.test.ts @@ -0,0 +1,178 @@ +import { AGGREGATION_OBJECT_LENGTH } from '@aztec/constants'; +import type { L1TxUtils, RollupContract } from '@aztec/ethereum'; +import { times } from '@aztec/foundation/collection'; +import { EthAddress } from 
'@aztec/foundation/eth-address'; +import { Fr } from '@aztec/foundation/fields'; +import type { PublisherConfig, TxSenderConfig } from '@aztec/sequencer-client'; +import { Proof } from '@aztec/stdlib/proofs'; +import { RootRollupPublicInputs } from '@aztec/stdlib/rollup'; + +import { type MockProxy, mock } from 'jest-mock-extended'; + +import { ProverNodePublisher } from './prover-node-publisher.js'; + +describe('prover-node-publisher', () => { + // Prover publisher dependencies + let rollup: MockProxy; + let l1Utils: MockProxy; + + let publisher: ProverNodePublisher; + + let config: TxSenderConfig & PublisherConfig; + + beforeEach(() => { + rollup = mock(); + l1Utils = mock(); + + config = { + l1ChainId: 1, + l1RpcUrls: ['http://localhost:8545'], + publisherPrivateKey: '0x1234', + l1PublishRetryIntervalMS: 1000, + viemPollingIntervalMS: 1000, + customForwarderContractAddress: EthAddress.random(), + l1Contracts: { + rollupAddress: EthAddress.random(), + registryAddress: EthAddress.random(), + inboxAddress: EthAddress.random(), + outboxAddress: EthAddress.random(), + rewardDistributorAddress: EthAddress.random(), + feeJuicePortalAddress: EthAddress.random(), + coinIssuerAddress: EthAddress.random(), + governanceAddress: EthAddress.random(), + governanceProposerAddress: EthAddress.random(), + feeJuiceAddress: EthAddress.random(), + stakingAssetAddress: EthAddress.random(), + }, + }; + }); + + beforeEach(() => { + publisher = new ProverNodePublisher(config, { rollupContract: rollup, l1TxUtils: l1Utils }); + }); + + const testCases = [ + // Usual case of proving full epoch + { pendingBlockNumber: 65n, provenBlockNumber: 32n, fromBlock: 33, toBlock: 64, expectedPublish: true, message: '' }, + // Failure case of proving beyond the pending chain + { + pendingBlockNumber: 65n, + provenBlockNumber: 32n, + fromBlock: 33, + toBlock: 66, + expectedPublish: false, + message: 'Cannot submit epoch proof for 33-66 as pending block is 65', + }, + // Some successful partial epochs + 
{ pendingBlockNumber: 33n, provenBlockNumber: 32n, fromBlock: 33, toBlock: 33, expectedPublish: true, message: '' }, + { pendingBlockNumber: 65n, provenBlockNumber: 32n, fromBlock: 33, toBlock: 38, expectedPublish: true, message: '' }, + { pendingBlockNumber: 40n, provenBlockNumber: 32n, fromBlock: 33, toBlock: 33, expectedPublish: true, message: '' }, + + // Somebody else proved the entire epoch already + + // We try and prove the full epoch - succeeds + { pendingBlockNumber: 65n, provenBlockNumber: 64n, fromBlock: 33, toBlock: 64, expectedPublish: true, message: '' }, + + // We try and prove a partial epoch that falls short of the end - fails as pointless to publish + { + pendingBlockNumber: 65n, + provenBlockNumber: 64n, + fromBlock: 33, + toBlock: 35, + expectedPublish: false, + message: 'Cannot submit epoch proof for 33-35 as proven block is 64', + }, + + // Somebody else partially proved the epoch already + + // We try and prove the rest of the epoch - succeeds + { pendingBlockNumber: 65n, provenBlockNumber: 40n, fromBlock: 41, toBlock: 64, expectedPublish: true, message: '' }, + + // We try and prove all of the epoch - succeeds + { pendingBlockNumber: 65n, provenBlockNumber: 40n, fromBlock: 33, toBlock: 64, expectedPublish: true, message: '' }, + + // We try and partially prove the epoch after their proof - succeeds again + { pendingBlockNumber: 65n, provenBlockNumber: 40n, fromBlock: 41, toBlock: 45, expectedPublish: true, message: '' }, + + // We try and partially prove the epoch on top of their proof - succeeds again + { pendingBlockNumber: 65n, provenBlockNumber: 40n, fromBlock: 33, toBlock: 45, expectedPublish: true, message: '' }, + + // We try and partially prove the epoch and partially on top of their proof - succeeds again + { pendingBlockNumber: 65n, provenBlockNumber: 40n, fromBlock: 35, toBlock: 45, expectedPublish: true, message: '' }, + + // We try and partially prove the epoch but less than was already proven - fails as pointless + { + 
pendingBlockNumber: 65n, + provenBlockNumber: 40n, + fromBlock: 33, + toBlock: 39, + expectedPublish: false, + message: 'Cannot submit epoch proof for 33-39 as proven block is 40', + }, + + // We try and partially prove the epoch but the same as was already proven - should possibly fail but succeeds for now, quite an edge case + { + pendingBlockNumber: 65n, + provenBlockNumber: 40n, + fromBlock: 33, + toBlock: 40, + expectedPublish: true, message: '', + }, + ]; + + test.each(testCases)( + 'submits proof for epoch with pendingBlock: $pendingBlockNumber, provenBlock: $provenBlockNumber, fromBlock: $fromBlock, toBlock: $toBlock', + async ({ pendingBlockNumber, provenBlockNumber, fromBlock, toBlock, expectedPublish, message }) => { + // Create public inputs for every block + const blocks = Array.from({ length: 100 }, () => { + return RootRollupPublicInputs.random(); + }); + + // Return the tips specified by the test + rollup.getTips.mockResolvedValue({ + pendingBlockNumber, + provenBlockNumber, + }); + + // Return the requested block + rollup.getBlock.mockImplementation((blockNumber: bigint) => + Promise.resolve({ + blockHash: blocks[Number(blockNumber) - 1].endBlockHash.toString(), + archive: blocks[Number(blockNumber) - 1].endArchive.root.toString(), + slotNumber: 0n, // unused + }), + ); + + // We have built a rollup proof of the range fromBlock - toBlock + // so we need to set our archives and hashes accordingly + const ourPublicInputs = RootRollupPublicInputs.random(); + ourPublicInputs.previousBlockHash = blocks[fromBlock - 2]?.endBlockHash ?? Fr.ZERO; + ourPublicInputs.previousArchive = blocks[fromBlock - 2]?.endArchive ?? Fr.ZERO; + ourPublicInputs.endBlockHash = blocks[toBlock - 1]?.endBlockHash ?? Fr.ZERO; + ourPublicInputs.endArchive = blocks[toBlock - 1]?.endArchive ?? 
Fr.ZERO; + + // Return our public inputs + const totalFields = ourPublicInputs.toFields().concat(times(AGGREGATION_OBJECT_LENGTH, Fr.zero)); + rollup.getEpochProofPublicInputs.mockResolvedValue(totalFields.map(x => x.toString())); + + const result = await publisher + .submitEpochProof({ + epochNumber: 2, + fromBlock, + toBlock, + publicInputs: ourPublicInputs, + proof: Proof.empty(), + }) + .then(() => 'Success') + .catch(error => error.message); + + if (expectedPublish) { + expect(result).toBe('Success'); + expect(l1Utils.sendAndMonitorTransaction).toHaveBeenCalled(); + } else { + expect(result).toBe(message); + expect(l1Utils.sendAndMonitorTransaction).not.toHaveBeenCalled(); + } + }, + ); +}); diff --git a/yarn-project/prover-node/src/prover-node-publisher.ts b/yarn-project/prover-node/src/prover-node-publisher.ts index 510567c04cbc..3d81026f3107 100644 --- a/yarn-project/prover-node/src/prover-node-publisher.ts +++ b/yarn-project/prover-node/src/prover-node-publisher.ts @@ -150,15 +150,17 @@ export class ProverNodePublisher { // Check that the block numbers match the expected epoch to be proven const { pendingBlockNumber: pending, provenBlockNumber: proven } = await this.rollupContract.getTips(); - if (proven !== BigInt(fromBlock) - 1n) { + // Don't publish if proven is beyond our toBlock, pointless to do so + if (proven > BigInt(toBlock)) { throw new Error(`Cannot submit epoch proof for ${fromBlock}-${toBlock} as proven block is ${proven}`); } + // toBlock can't be greater than pending if (toBlock > pending) { throw new Error(`Cannot submit epoch proof for ${fromBlock}-${toBlock} as pending block is ${pending}`); } // Check the block hash and archive for the immediate block before the epoch - const blockLog = await this.rollupContract.getBlock(proven); + const blockLog = await this.rollupContract.getBlock(BigInt(fromBlock - 1)); if (publicInputs.previousArchive.root.toString() !== blockLog.archive) { throw new Error( `Previous archive root mismatch: 
${publicInputs.previousArchive.root.toString()} !== ${blockLog.archive}`,