diff --git a/yarn-project/archiver/package.json b/yarn-project/archiver/package.json index 7c93846242fa..e919555ed20d 100644 --- a/yarn-project/archiver/package.json +++ b/yarn-project/archiver/package.json @@ -4,10 +4,8 @@ "type": "module", "exports": { ".": "./dest/index.js", - "./data-retrieval": "./dest/archiver/data_retrieval.js", - "./epoch": "./dest/archiver/epoch_helpers.js", "./test": "./dest/test/index.js", - "./config": "./dest/archiver/config.js" + "./config": "./dest/config.js" }, "typedocOptions": { "entryPoints": [ diff --git a/yarn-project/archiver/src/archiver-store.test.ts b/yarn-project/archiver/src/archiver-store.test.ts new file mode 100644 index 000000000000..ded7cc51d5bd --- /dev/null +++ b/yarn-project/archiver/src/archiver-store.test.ts @@ -0,0 +1,488 @@ +import type { BlobClientInterface } from '@aztec/blob-client/client'; +import { GENESIS_ARCHIVE_ROOT } from '@aztec/constants'; +import type { EpochCache, EpochCommitteeInfo } from '@aztec/epoch-cache'; +import { RollupContract } from '@aztec/ethereum/contracts'; +import type { ViemPublicClient } from '@aztec/ethereum/types'; +import { BlockNumber, CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types'; +import { Buffer32 } from '@aztec/foundation/buffer'; +import { Fr } from '@aztec/foundation/curves/bn254'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; +import { L2BlockNew } from '@aztec/stdlib/block'; +import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; +import { makeStateReference } from '@aztec/stdlib/testing'; +import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees'; +import { getTelemetryClient } from '@aztec/telemetry-client'; + +import { EventEmitter } from 'events'; +import { type MockProxy, mock } from 'jest-mock-extended'; + +import { Archiver, type ArchiverEmitter } from './archiver.js'; +import { InitialBlockNumberNotSequentialError } from './errors.js'; 
+import type { ArchiverInstrumentation } from './modules/instrumentation.js'; +import { ArchiverL1Synchronizer } from './modules/l1_synchronizer.js'; +import { KVArchiverDataStore } from './store/kv_archiver_store.js'; +import { makeChainedCheckpoints } from './test/mock_structs.js'; + +describe('Archiver Store', () => { + const rollupAddress = EthAddress.random(); + const registryAddress = EthAddress.random(); + const governanceProposerAddress = EthAddress.random(); + const slashFactoryAddress = EthAddress.random(); + const slashingProposerAddress = EthAddress.random(); + + let publicClient: MockProxy; + let debugClient: MockProxy; + let instrumentation: MockProxy; + let blobClient: MockProxy; + let epochCache: MockProxy; + let archiverStore: KVArchiverDataStore; + let l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr }; + let archiver: Archiver; + + beforeEach(async () => { + const now = +new Date(); + + publicClient = mock(); + debugClient = publicClient; + blobClient = mock(); + epochCache = mock(); + epochCache.getCommitteeForEpoch.mockResolvedValue({ committee: [] as EthAddress[] } as EpochCommitteeInfo); + + const rollupContract = mock(); + Object.defineProperty(rollupContract, 'address', { value: rollupAddress.toString(), writable: true }); + + const tracer = getTelemetryClient().getTracer(''); + instrumentation = mock({ isEnabled: () => true, tracer }); + + archiverStore = new KVArchiverDataStore(await openTmpStore('archiver_test'), 1000); + + l1Constants = { + l1GenesisTime: BigInt(now), + l1StartBlock: 0n, + l1StartBlockHash: Buffer32.random(), + epochDuration: 4, + slotDuration: 24, + ethereumSlotDuration: 12, + proofSubmissionEpochs: 1, + genesisArchiveRoot: new Fr(GENESIS_ARCHIVE_ROOT), + }; + + const contractAddresses = { + registryAddress, + governanceProposerAddress, + slashFactoryAddress, + slashingProposerAddress, + }; + + const config = { + pollingIntervalMs: 1000, + batchSize: 1000, + 
maxAllowedEthClientDriftSeconds: 300, + ethereumAllowNoDebugHosts: true, + }; + + const events = new EventEmitter() as ArchiverEmitter; + const synchronizer = mock(); + + archiver = new Archiver( + publicClient, + debugClient, + rollupContract, + contractAddresses, + archiverStore, + config, + blobClient, + instrumentation, + l1Constants, + synchronizer, + events, + ); + }); + + afterEach(async () => { + await archiver?.stop(); + }); + + describe('getPublishedCheckpoints', () => { + it('returns published checkpoints with full checkpoint data', async () => { + const genesisArchive = new AppendOnlyTreeSnapshot(new Fr(GENESIS_ARCHIVE_ROOT), 1); + const testCheckpoints = await makeChainedCheckpoints(3, { previousArchive: genesisArchive }); + await archiverStore.addCheckpoints(testCheckpoints); + + const result = await archiver.getPublishedCheckpoints(CheckpointNumber(1), 10); + + expect(result.length).toBe(3); + expect(result.map(c => c.checkpoint.number)).toEqual([1, 2, 3]); + result.forEach((pc, i) => { + expect(pc.checkpoint.blocks.length).toBeGreaterThan(0); + expect(pc.checkpoint.archive.root.toString()).toEqual(testCheckpoints[i].checkpoint.archive.root.toString()); + expect(pc.l1).toBeDefined(); + }); + }); + + it('respects the limit parameter', async () => { + const genesisArchive = new AppendOnlyTreeSnapshot(new Fr(GENESIS_ARCHIVE_ROOT), 1); + const testCheckpoints = await makeChainedCheckpoints(3, { previousArchive: genesisArchive }); + await archiverStore.addCheckpoints(testCheckpoints); + + const result = await archiver.getPublishedCheckpoints(CheckpointNumber(1), 2); + + expect(result.length).toBe(2); + expect(result.map(c => c.checkpoint.number)).toEqual([1, 2]); + }); + + it('respects the starting checkpoint number', async () => { + const genesisArchive = new AppendOnlyTreeSnapshot(new Fr(GENESIS_ARCHIVE_ROOT), 1); + const testCheckpoints = await makeChainedCheckpoints(3, { previousArchive: genesisArchive }); + await 
archiverStore.addCheckpoints(testCheckpoints); + + const result = await archiver.getPublishedCheckpoints(CheckpointNumber(2), 10); + + expect(result.length).toBe(2); + expect(result.map(c => c.checkpoint.number)).toEqual([2, 3]); + }); + + it('returns empty array when no checkpoints exist', async () => { + const result = await archiver.getPublishedCheckpoints(CheckpointNumber(1), 10); + + expect(result).toEqual([]); + }); + }); + + describe('getCheckpointsForEpoch', () => { + it('returns checkpoints for a specific epoch based on slot numbers', async () => { + // l1Constants has epochDuration: 4, so epoch 0 has slots 0-3, epoch 1 has slots 4-7 + const genesisArchive = new AppendOnlyTreeSnapshot(new Fr(GENESIS_ARCHIVE_ROOT), 1); + const testCheckpoints = await makeChainedCheckpoints(3, { + previousArchive: genesisArchive, + makeCheckpointOptions: cpNumber => { + // Checkpoint 1 & 2 in epoch 0 (slots 0-3), checkpoint 3 in epoch 1 (slots 4-7) + const slotNumbers: Record = { 1: SlotNumber(1), 2: SlotNumber(3), 3: SlotNumber(5) }; + return { slotNumber: slotNumbers[Number(cpNumber)] }; + }, + }); + await archiverStore.addCheckpoints(testCheckpoints); + + const epoch0Checkpoints = await archiver.getCheckpointsForEpoch(EpochNumber(0)); + expect(epoch0Checkpoints.length).toBe(2); + expect(epoch0Checkpoints.map(c => c.number)).toEqual([1, 2]); + + const epoch1Checkpoints = await archiver.getCheckpointsForEpoch(EpochNumber(1)); + expect(epoch1Checkpoints.length).toBe(1); + expect(epoch1Checkpoints.map(c => c.number)).toEqual([3]); + }); + + it('returns empty array for epoch with no checkpoints', async () => { + const genesisArchive = new AppendOnlyTreeSnapshot(new Fr(GENESIS_ARCHIVE_ROOT), 1); + const testCheckpoints = await makeChainedCheckpoints(1, { + previousArchive: genesisArchive, + makeCheckpointOptions: () => ({ slotNumber: SlotNumber(2) }), // Epoch 0 + }); + await archiverStore.addCheckpoints(testCheckpoints); + + const epoch1Checkpoints = await 
archiver.getCheckpointsForEpoch(EpochNumber(1)); + expect(epoch1Checkpoints).toEqual([]); + }); + + it('returns checkpoints in correct order (ascending by checkpoint number)', async () => { + // Create multiple checkpoints all in epoch 0 + const genesisArchive = new AppendOnlyTreeSnapshot(new Fr(GENESIS_ARCHIVE_ROOT), 1); + const testCheckpoints = await makeChainedCheckpoints(3, { + previousArchive: genesisArchive, + makeCheckpointOptions: cpNumber => { + // All in epoch 0 (slots 0-3) + const slotNumbers: Record = { 1: SlotNumber(0), 2: SlotNumber(1), 3: SlotNumber(2) }; + return { slotNumber: slotNumbers[Number(cpNumber)] }; + }, + }); + await archiverStore.addCheckpoints(testCheckpoints); + + const epoch0Checkpoints = await archiver.getCheckpointsForEpoch(EpochNumber(0)); + expect(epoch0Checkpoints.length).toBe(3); + expect(epoch0Checkpoints.map(c => c.number)).toEqual([1, 2, 3]); + }); + }); + + describe('addBlock (L2BlockSink)', () => { + // State reference needs to be valid for LogStore's dataStartIndexForBlock calculation + // All blocks use checkpoint number 1 since they're being added to the initial checkpoint + const makeBlock = (blockNumber: BlockNumber, indexIntoCheckpoint = 0, previousArchive?: AppendOnlyTreeSnapshot) => + L2BlockNew.random(blockNumber, { + checkpointNumber: CheckpointNumber(1), + state: makeStateReference(0x100), + indexWithinCheckpoint: indexIntoCheckpoint, + ...(previousArchive ? 
{ lastArchive: previousArchive } : {}), + }); + + // Genesis archive for the first block + const genesisArchive = new AppendOnlyTreeSnapshot(new Fr(GENESIS_ARCHIVE_ROOT), 1); + + it('adds a block to the store', async () => { + const block = await makeBlock(BlockNumber(1), 0, genesisArchive); + await archiver.addBlock(block); + + const retrievedBlock = await archiver.getL2BlockNew(BlockNumber(1)); + expect(retrievedBlock).toBeDefined(); + expect(retrievedBlock!.number).toEqual(BlockNumber(1)); + expect((await retrievedBlock!.header.hash()).toString()).toEqual((await block.header.hash()).toString()); + }); + + it('adds multiple blocks incrementally', async () => { + const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); + const block2 = await makeBlock(BlockNumber(2), 1, block1.archive); + const block3 = await makeBlock(BlockNumber(3), 2, block2.archive); + + await archiver.addBlock(block1); + await archiver.addBlock(block2); + await archiver.addBlock(block3); + + const retrievedBlock1 = await archiver.getL2BlockNew(BlockNumber(1)); + const retrievedBlock2 = await archiver.getL2BlockNew(BlockNumber(2)); + const retrievedBlock3 = await archiver.getL2BlockNew(BlockNumber(3)); + + expect(retrievedBlock1!.number).toEqual(BlockNumber(1)); + expect(retrievedBlock2!.number).toEqual(BlockNumber(2)); + expect(retrievedBlock3!.number).toEqual(BlockNumber(3)); + }); + + it('rejects blocks with non-incremental block number (gap)', async () => { + const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); + const block3 = await makeBlock(BlockNumber(3), 2, block1.archive); // Skip block 2 + + await archiver.addBlock(block1); + + // Block 3 should be rejected because block 2 is missing + await expect(archiver.addBlock(block3)).rejects.toThrow(InitialBlockNumberNotSequentialError); + }); + + it('rejects blocks with duplicate block numbers', async () => { + const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); + const block2 = await 
makeBlock(BlockNumber(2), 1, block1.archive); + + await archiver.addBlock(block1); + await archiver.addBlock(block2); + + // Adding block 2 again should be rejected + await expect(archiver.addBlock(block2)).rejects.toThrow(InitialBlockNumberNotSequentialError); + }); + + it('rejects first block if not starting from block 1', async () => { + const block5 = await makeBlock(BlockNumber(5), 0, genesisArchive); + + // First block must be block 1 + await expect(archiver.addBlock(block5)).rejects.toThrow(); + }); + + it('allows block number to start from 1 (initial block)', async () => { + const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); + + await archiver.addBlock(block1); + + const retrievedBlock = await archiver.getL2BlockNew(BlockNumber(1)); + expect(retrievedBlock).toBeDefined(); + expect(retrievedBlock!.number).toEqual(BlockNumber(1)); + }); + + it('retrieves multiple blocks with getL2BlocksNew', async () => { + const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); + const block2 = await makeBlock(BlockNumber(2), 1, block1.archive); + const block3 = await makeBlock(BlockNumber(3), 2, block2.archive); + + await archiver.addBlock(block1); + await archiver.addBlock(block2); + await archiver.addBlock(block3); + + const blocks = await archiver.getL2BlocksNew(BlockNumber(1), 3); + expect(blocks.length).toEqual(3); + expect(await blocks[0].hash()).toEqual(await block1.hash()); + expect(await blocks[1].hash()).toEqual(await block2.hash()); + expect(await blocks[2].hash()).toEqual(await block3.hash()); + }); + + it('retrieves blocks with limit in getL2BlocksNew', async () => { + const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); + const block2 = await makeBlock(BlockNumber(2), 1, block1.archive); + const block3 = await makeBlock(BlockNumber(3), 2, block2.archive); + + await archiver.addBlock(block1); + await archiver.addBlock(block2); + await archiver.addBlock(block3); + + // Request only 2 blocks starting from block 1 + const 
blocks = await archiver.getL2BlocksNew(BlockNumber(1), 2); + expect(blocks.length).toEqual(2); + expect(await blocks[0].hash()).toEqual(await block1.hash()); + expect(await blocks[1].hash()).toEqual(await block2.hash()); + }); + + it('retrieves blocks starting from middle with getL2BlocksNew', async () => { + const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); + const block2 = await makeBlock(BlockNumber(2), 1, block1.archive); + const block3 = await makeBlock(BlockNumber(3), 2, block2.archive); + + await archiver.addBlock(block1); + await archiver.addBlock(block2); + await archiver.addBlock(block3); + + // Start from block 2 + const blocks = await archiver.getL2BlocksNew(BlockNumber(2), 2); + expect(blocks.length).toEqual(2); + expect(await blocks[0].hash()).toEqual(await block2.hash()); + expect(await blocks[1].hash()).toEqual(await block3.hash()); + }); + + it('returns empty array when requesting blocks beyond available range', async () => { + const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); + + await archiver.addBlock(block1); + + // Request blocks starting from block 5 (which doesn't exist) + const blocks = await archiver.getL2BlocksNew(BlockNumber(5), 3); + expect(blocks).toEqual([]); + }); + + it('returns partial results when limit exceeds available blocks', async () => { + const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); + const block2 = await makeBlock(BlockNumber(2), 1, block1.archive); + + await archiver.addBlock(block1); + await archiver.addBlock(block2); + + // Request 10 blocks but only 2 are available + const blocks = await archiver.getL2BlocksNew(BlockNumber(1), 10); + expect(blocks.length).toEqual(2); + expect(await blocks[0].hash()).toEqual(await block1.hash()); + expect(await blocks[1].hash()).toEqual(await block2.hash()); + }); + }); + + describe('getCheckpointedBlocks', () => { + it('returns checkpointed blocks with checkpoint info', async () => { + const genesisArchive = new 
AppendOnlyTreeSnapshot(new Fr(GENESIS_ARCHIVE_ROOT), 1); + const testCheckpoints = await makeChainedCheckpoints(3, { previousArchive: genesisArchive }); + await archiverStore.addCheckpoints(testCheckpoints); + + const result = await archiver.getCheckpointedBlocks(BlockNumber(1), 100); + + const expectedBlocks = testCheckpoints.flatMap(c => c.checkpoint.blocks); + expect(result.length).toBe(expectedBlocks.length); + + // Verify blocks are returned with correct checkpoint info + let blockIndex = 0; + for (let cpIdx = 0; cpIdx < testCheckpoints.length; cpIdx++) { + const checkpoint = testCheckpoints[cpIdx]; + for (let i = 0; i < checkpoint.checkpoint.blocks.length; i++) { + const cb = result[blockIndex]; + const expectedBlock = checkpoint.checkpoint.blocks[i]; + + expect(cb.block.number).toBe(expectedBlock.number); + expect(cb.checkpointNumber).toBe(checkpoint.checkpoint.number); + expect(cb.block.archive.root.toString()).toBe(expectedBlock.archive.root.toString()); + expect(cb.l1).toBeDefined(); + expect(cb.l1.blockNumber).toBeGreaterThan(0n); + + blockIndex++; + } + } + }); + + it('respects the limit parameter', async () => { + const genesisArchive = new AppendOnlyTreeSnapshot(new Fr(GENESIS_ARCHIVE_ROOT), 1); + const testCheckpoints = await makeChainedCheckpoints(3, { previousArchive: genesisArchive }); + await archiverStore.addCheckpoints(testCheckpoints); + + const result = await archiver.getCheckpointedBlocks(BlockNumber(1), 2); + + expect(result.length).toBe(2); + expect(result[0].block.number).toBe(BlockNumber(1)); + expect(result[1].block.number).toBe(BlockNumber(2)); + expect(result[0].checkpointNumber).toBe(1); + expect(result[1].checkpointNumber).toBe(2); + }); + + it('returns blocks starting from specified block number', async () => { + const genesisArchive = new AppendOnlyTreeSnapshot(new Fr(GENESIS_ARCHIVE_ROOT), 1); + const testCheckpoints = await makeChainedCheckpoints(3, { previousArchive: genesisArchive }); + await 
archiverStore.addCheckpoints(testCheckpoints); + + const result = await archiver.getCheckpointedBlocks(BlockNumber(2), 10); + + expect(result.length).toBe(2); + expect(result[0].block.number).toBe(BlockNumber(2)); + expect(result[1].block.number).toBe(BlockNumber(3)); + expect(result[0].checkpointNumber).toBe(2); + expect(result[1].checkpointNumber).toBe(3); + }); + + it('returns empty array when no checkpointed blocks exist', async () => { + const result = await archiver.getCheckpointedBlocks(BlockNumber(1), 10); + + expect(result).toEqual([]); + }); + + it('filters by proven status when proven=true', async () => { + const genesisArchive = new AppendOnlyTreeSnapshot(new Fr(GENESIS_ARCHIVE_ROOT), 1); + const testCheckpoints = await makeChainedCheckpoints(3, { previousArchive: genesisArchive }); + await archiverStore.addCheckpoints(testCheckpoints); + + // Set checkpoint 1 as proven + await archiverStore.setProvenCheckpointNumber(CheckpointNumber(1)); + + // Get all blocks + const allBlocks = await archiver.getCheckpointedBlocks(BlockNumber(1), 100); + expect(allBlocks.length).toBe(3); + + // Get only proven blocks (checkpoint 1 only) + const provenBlocks = await archiver.getCheckpointedBlocks(BlockNumber(1), 100, true); + expect(provenBlocks.length).toBe(1); + expect(provenBlocks[0].checkpointNumber).toBe(1); + expect(provenBlocks[0].block.number).toBe(BlockNumber(1)); + }); + }); + + describe('getL2BlocksNew with proven filter', () => { + it('filters by proven status when proven=true', async () => { + const genesisArchive = new AppendOnlyTreeSnapshot(new Fr(GENESIS_ARCHIVE_ROOT), 1); + const testCheckpoints = await makeChainedCheckpoints(3, { previousArchive: genesisArchive }); + await archiverStore.addCheckpoints(testCheckpoints); + + // Set checkpoint 1 as proven + await archiverStore.setProvenCheckpointNumber(CheckpointNumber(1)); + + // Get all blocks + const allBlocks = await archiver.getL2BlocksNew(BlockNumber(1), 100); + expect(allBlocks.length).toBe(3); + 
+ // Get only proven blocks (checkpoint 1 only) + const provenBlocks = await archiver.getL2BlocksNew(BlockNumber(1), 100, true); + expect(provenBlocks.length).toBe(1); + expect(provenBlocks[0].number).toBe(BlockNumber(1)); + + // Verify unproven blocks are not included + const unprovenBlockNumbers = [BlockNumber(2), BlockNumber(3)]; + provenBlocks.forEach(b => { + expect(unprovenBlockNumbers).not.toContain(b.number); + }); + }); + + it('returns all blocks when proven=false or undefined', async () => { + const genesisArchive = new AppendOnlyTreeSnapshot(new Fr(GENESIS_ARCHIVE_ROOT), 1); + const testCheckpoints = await makeChainedCheckpoints(3, { previousArchive: genesisArchive }); + await archiverStore.addCheckpoints(testCheckpoints); + + // Set checkpoint 1 as proven + await archiverStore.setProvenCheckpointNumber(CheckpointNumber(1)); + + // Get blocks with proven=false - should include all blocks + const blocksProvenFalse = await archiver.getL2BlocksNew(BlockNumber(1), 100, false); + expect(blocksProvenFalse.length).toBe(3); + expect(blocksProvenFalse.map(b => b.number)).toEqual([BlockNumber(1), BlockNumber(2), BlockNumber(3)]); + + // Get blocks with proven=undefined - should include all blocks + const blocksProvenUndefined = await archiver.getL2BlocksNew(BlockNumber(1), 100); + expect(blocksProvenUndefined.length).toBe(3); + expect(blocksProvenUndefined.map(b => b.number)).toEqual([BlockNumber(1), BlockNumber(2), BlockNumber(3)]); + }); + }); +}); diff --git a/yarn-project/archiver/src/archiver/archiver.sync.test.ts b/yarn-project/archiver/src/archiver-sync.test.ts similarity index 79% rename from yarn-project/archiver/src/archiver/archiver.sync.test.ts rename to yarn-project/archiver/src/archiver-sync.test.ts index 9ce2ebf718d4..a7b980609de1 100644 --- a/yarn-project/archiver/src/archiver/archiver.sync.test.ts +++ b/yarn-project/archiver/src/archiver-sync.test.ts @@ -5,7 +5,7 @@ import type { EpochCache, EpochCommitteeInfo } from '@aztec/epoch-cache'; import { 
DefaultL1ContractsConfig } from '@aztec/ethereum/config'; import { BlockTagTooOldError, type InboxContract, type RollupContract } from '@aztec/ethereum/contracts'; import type { ViemPublicClient } from '@aztec/ethereum/types'; -import { CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types'; +import { BlockNumber, CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { Buffer32 } from '@aztec/foundation/buffer'; import { sum, times } from '@aztec/foundation/collection'; import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; @@ -22,12 +22,14 @@ import { getTelemetryClient } from '@aztec/telemetry-client'; import { jest } from '@jest/globals'; import assert from 'assert'; +import { EventEmitter } from 'events'; import { type MockProxy, mock } from 'jest-mock-extended'; import type { GetBlockReturnType } from 'viem'; -import { Archiver } from './archiver.js'; -import type { ArchiverInstrumentation } from './instrumentation.js'; -import { KVArchiverDataStore } from './kv_archiver_store/kv_archiver_store.js'; +import { Archiver, type ArchiverEmitter } from './archiver.js'; +import type { ArchiverInstrumentation } from './modules/instrumentation.js'; +import { ArchiverL1Synchronizer } from './modules/l1_synchronizer.js'; +import { KVArchiverDataStore } from './store/kv_archiver_store.js'; import { FakeL1State, type FakeL1StateConfig } from './test/fake_l1_state.js'; describe('Archiver Sync', () => { @@ -49,7 +51,9 @@ describe('Archiver Sync', () => { let archiverStore: KVArchiverDataStore; let l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr }; let archiver: Archiver; + let synchronizer: ArchiverL1Synchronizer; let logger: Logger; + let syncLogger: Logger; let now: number; const GENESIS_ROOT = new Fr(GENESIS_ARCHIVE_ROOT); @@ -57,6 +61,7 @@ describe('Archiver Sync', () => { beforeEach(async () => { logger = createLogger('archiver:sync:test'); + 
syncLogger = createLogger('archiver:l1-sync:test'); now = Math.floor(Date.now() / 1000); dateProvider = new TestDateProvider(); @@ -109,24 +114,47 @@ describe('Archiver Sync', () => { rollupContract = fake.createMockRollupContract(publicClient); inboxContract = fake.createMockInboxContract(publicClient); - archiver = new Archiver( + const config = { + pollingIntervalMs: 1000, + batchSize: 1000, + maxAllowedEthClientDriftSeconds: 300, + ethereumAllowNoDebugHosts: true, + }; + + // Create event emitter shared by archiver and synchronizer + const events = new EventEmitter() as ArchiverEmitter; + + // Create the L1 synchronizer + synchronizer = new ArchiverL1Synchronizer( + publicClient, publicClient, - publicClient, // debugClient same as publicClient for tests rollupContract, inboxContract, contractAddresses, archiverStore, - { - pollingIntervalMs: 1000, - batchSize: 1000, - maxAllowedEthClientDriftSeconds: 300, - ethereumAllowNoDebugHosts: true, - }, + config, blobClient, epochCache, dateProvider, instrumentation, l1Constants, + events, + instrumentation.tracer, + syncLogger, + ); + + archiver = new Archiver( + publicClient, + publicClient, + rollupContract, + contractAddresses, + archiverStore, + config, + blobClient, + instrumentation, + l1Constants, + synchronizer, + events, ); }); @@ -217,7 +245,7 @@ describe('Archiver Sync', () => { }, 30_000); it('ignores checkpoint 3 because it has been pruned', async () => { - const loggerSpy = jest.spyOn((archiver as any).log, 'warn'); + const loggerSpy = jest.spyOn(syncLogger, 'warn'); expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(0)); @@ -311,7 +339,7 @@ describe('Archiver Sync', () => { }, 10_000); it('skip event search if no changes found', async () => { - const loggerSpy = jest.spyOn((archiver as any).log, 'debug'); + const loggerSpy = jest.spyOn(syncLogger, 'debug'); expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(0)); @@ -755,7 +783,7 @@ describe('Archiver Sync', () => { 
describe('reorg handling', () => { it('handles L2 reorg', async () => { - const loggerSpy = jest.spyOn((archiver as any).log, 'debug'); + const loggerSpy = jest.spyOn(syncLogger, 'debug'); expect(await archiver.getCheckpointNumber()).toEqual(CheckpointNumber(0)); @@ -929,4 +957,193 @@ describe('Archiver Sync', () => { xit('does not attempt to download data for a checkpoint that has been pruned', () => {}); }); + + describe('addBlock and L1 sync interaction', () => { + it('blocks added via addBlock become checkpointed when checkpoint syncs from L1', async () => { + // First, sync checkpoint 1 from L1 to establish a baseline + const { checkpoint: cp1 } = await fake.addCheckpoint(CheckpointNumber(1), { + l1BlockNumber: 70n, + messagesL1BlockNumber: 60n, + numL1ToL2Messages: 3, + }); + + fake.setL1BlockNumber(100n); + await archiver.start(false); + await retryUntil( + () => archiver.getSynchedCheckpointNumber().then(n => n === CheckpointNumber(1)), + 'sync', + 10, + 0.1, + ); + + expect(await archiver.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(1)); + const lastBlockInCheckpoint1 = cp1.blocks[cp1.blocks.length - 1].number; + + // Verify L2Tips after syncing checkpoint 1: proposed and checkpointed should both be at checkpoint 1 + const tipsAfterCheckpoint1 = await archiver.getL2Tips(); + expect(tipsAfterCheckpoint1.proposed.number).toEqual(lastBlockInCheckpoint1); + expect(tipsAfterCheckpoint1.checkpointed.block.number).toEqual(lastBlockInCheckpoint1); + expect(tipsAfterCheckpoint1.checkpointed.checkpoint.number).toEqual(CheckpointNumber(1)); + + // Create checkpoint 2 on L1 at a future block (not yet visible to archiver) + const { checkpoint: cp2 } = await fake.addCheckpoint(CheckpointNumber(2), { + l1BlockNumber: 5000n, // Far in the future + messagesL1BlockNumber: 4990n, + numL1ToL2Messages: 3, + }); + + // Now add blocks from checkpoint 2 via addBlock (simulating local block production) + for (const block of cp2.blocks) { + await archiver.addBlock(block); 
+ } + + // Verify blocks are retrievable but not yet checkpointed + const lastBlockInCheckpoint2 = cp2.blocks[cp2.blocks.length - 1].number; + expect(await archiver.getBlockNumber()).toEqual(lastBlockInCheckpoint2); + expect(await archiver.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(1)); + + // Verify L2Tips after adding blocks: proposed advances but checkpointed stays at checkpoint 1 + const tipsAfterAddBlock = await archiver.getL2Tips(); + expect(tipsAfterAddBlock.proposed.number).toEqual(lastBlockInCheckpoint2); + expect(tipsAfterAddBlock.checkpointed.block.number).toEqual(lastBlockInCheckpoint1); + expect(tipsAfterAddBlock.checkpointed.checkpoint.number).toEqual(CheckpointNumber(1)); + + // getCheckpointedBlock should return undefined for the new blocks since checkpoint 2 hasn't synced + const firstNewBlockNumber = BlockNumber(lastBlockInCheckpoint1 + 1); + const uncheckpointedBlock = await archiver.getCheckpointedBlock(firstNewBlockNumber); + expect(uncheckpointedBlock).toBeUndefined(); + + // But getL2BlockNew should work (it retrieves both checkpointed and uncheckpointed blocks) + const block = await archiver.getL2BlockNew(firstNewBlockNumber); + expect(block).toBeDefined(); + + // Now advance L1 so checkpoint 2 becomes visible + fake.setL1BlockNumber(5010n); + + await retryUntil( + () => archiver.getSynchedCheckpointNumber().then(n => n === CheckpointNumber(2)), + 'sync', + 10, + 0.1, + ); + + // Now the blocks should be checkpointed + expect(await archiver.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(2)); + + // Verify L2Tips after syncing checkpoint 2: proposed and checkpointed should both be at checkpoint 2 + const tipsAfterCheckpoint2 = await archiver.getL2Tips(); + expect(tipsAfterCheckpoint2.proposed.number).toEqual(lastBlockInCheckpoint2); + expect(tipsAfterCheckpoint2.checkpointed.block.number).toEqual(lastBlockInCheckpoint2); + expect(tipsAfterCheckpoint2.checkpointed.checkpoint.number).toEqual(CheckpointNumber(2)); + + // 
getCheckpointedBlock should now work for the new blocks + const checkpointedBlock = await archiver.getCheckpointedBlock(firstNewBlockNumber); + expect(checkpointedBlock).toBeDefined(); + expect(checkpointedBlock!.checkpointNumber).toEqual(2); + }, 10_000); + + it('blocks added via checkpoints can not be added via addBlocks', async () => { + // First, sync checkpoint 1 from L1 to establish a baseline + const { checkpoint: cp1 } = await fake.addCheckpoint(CheckpointNumber(1), { + l1BlockNumber: 70n, + messagesL1BlockNumber: 60n, + numL1ToL2Messages: 3, + }); + + fake.setL1BlockNumber(100n); + await archiver.start(false); + await retryUntil( + () => archiver.getSynchedCheckpointNumber().then(n => n === CheckpointNumber(1)), + 'sync', + 10, + 0.1, + ); + + expect(await archiver.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(1)); + const blockAlreadySyncedFromCheckpoint = cp1.blocks[cp1.blocks.length - 1]; + + // Now try and add one of the blocks via the addBlocks method. It should throw + await expect(archiver.addBlock(blockAlreadySyncedFromCheckpoint)).rejects.toThrow(); + }, 10_000); + + it('can add more blocks after checkpoint syncs and then sync another checkpoint', async () => { + // Sync the first checkpoint normally + const { checkpoint: cp1 } = await fake.addCheckpoint(CheckpointNumber(1), { + l1BlockNumber: 70n, + messagesL1BlockNumber: 60n, + numL1ToL2Messages: 3, + }); + + fake.setL1BlockNumber(100n); + await archiver.start(false); + await retryUntil( + () => archiver.getSynchedCheckpointNumber().then(n => n === CheckpointNumber(1)), + 'sync', + 10, + 0.1, + ); + + expect(await archiver.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(1)); + const lastBlockInCheckpoint1 = cp1.blocks[cp1.blocks.length - 1].number; + + // Verify L2Tips after syncing checkpoint 1: proposed and checkpointed at checkpoint 1 + const tipsAfterCheckpoint1 = await archiver.getL2Tips(); + expect(tipsAfterCheckpoint1.proposed.number).toEqual(lastBlockInCheckpoint1); + 
expect(tipsAfterCheckpoint1.checkpointed.block.number).toEqual(lastBlockInCheckpoint1); + expect(tipsAfterCheckpoint1.checkpointed.checkpoint.number).toEqual(CheckpointNumber(1)); + + // Create checkpoint 2 on L1 at a future block (not yet visible) + const { checkpoint: cp2 } = await fake.addCheckpoint(CheckpointNumber(2), { + l1BlockNumber: 5000n, // Far in the future + messagesL1BlockNumber: 4990n, + numL1ToL2Messages: 3, + }); + + // Now add more blocks via addBlock (simulating local block production ahead of L1) + for (const block of cp2.blocks) { + await archiver.addBlock(block); + } + + // Verify blocks are retrievable + const lastBlockInCheckpoint2 = cp2.blocks[cp2.blocks.length - 1].number; + expect(await archiver.getBlockNumber()).toEqual(lastBlockInCheckpoint2); + + // But checkpoint number should still be 1 + expect(await archiver.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(1)); + + // Verify L2Tips after adding blocks: proposed advances, checkpointed stays at checkpoint 1 + const tipsAfterAddBlock = await archiver.getL2Tips(); + expect(tipsAfterAddBlock.proposed.number).toEqual(lastBlockInCheckpoint2); + expect(tipsAfterAddBlock.checkpointed.block.number).toEqual(lastBlockInCheckpoint1); + expect(tipsAfterAddBlock.checkpointed.checkpoint.number).toEqual(CheckpointNumber(1)); + + // New blocks should not be checkpointed yet + const firstNewBlockNumber = BlockNumber(lastBlockInCheckpoint1 + 1); + const uncheckpointedBlock = await archiver.getCheckpointedBlock(firstNewBlockNumber); + expect(uncheckpointedBlock).toBeUndefined(); + + // Now advance L1 so checkpoint 2 becomes visible + fake.setL1BlockNumber(5010n); + + await retryUntil( + () => archiver.getSynchedCheckpointNumber().then(n => n === CheckpointNumber(2)), + 'sync', + 10, + 0.1, + ); + + // Now all blocks should be checkpointed + expect(await archiver.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(2)); + + // Verify L2Tips after syncing checkpoint 2: both proposed and 
checkpointed at checkpoint 2 + const tipsAfterCheckpoint2 = await archiver.getL2Tips(); + expect(tipsAfterCheckpoint2.proposed.number).toEqual(lastBlockInCheckpoint2); + expect(tipsAfterCheckpoint2.checkpointed.block.number).toEqual(lastBlockInCheckpoint2); + expect(tipsAfterCheckpoint2.checkpointed.checkpoint.number).toEqual(CheckpointNumber(2)); + + const checkpointedBlock = await archiver.getCheckpointedBlock(firstNewBlockNumber); + expect(checkpointedBlock).toBeDefined(); + expect(checkpointedBlock!.checkpointNumber).toEqual(2); + }, 10_000); + }); }); diff --git a/yarn-project/archiver/src/archiver.ts b/yarn-project/archiver/src/archiver.ts new file mode 100644 index 000000000000..db2a2b82eb99 --- /dev/null +++ b/yarn-project/archiver/src/archiver.ts @@ -0,0 +1,522 @@ +import type { BlobClientInterface } from '@aztec/blob-client/client'; +import { GENESIS_BLOCK_HEADER_HASH, INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; +import { EpochCache } from '@aztec/epoch-cache'; +import { BlockTagTooOldError, RollupContract } from '@aztec/ethereum/contracts'; +import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses'; +import type { ViemPublicClient, ViemPublicDebugClient } from '@aztec/ethereum/types'; +import { BlockNumber, CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types'; +import { Buffer32 } from '@aztec/foundation/buffer'; +import { merge } from '@aztec/foundation/collection'; +import { Fr } from '@aztec/foundation/curves/bn254'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { type Logger, createLogger } from '@aztec/foundation/log'; +import { type PromiseWithResolvers, promiseWithResolvers } from '@aztec/foundation/promise'; +import { RunningPromise, makeLoggingErrorHandler } from '@aztec/foundation/running-promise'; +import { DateProvider } from '@aztec/foundation/timer'; +import { + type ArchiverEmitter, + type CheckpointId, + GENESIS_CHECKPOINT_HEADER_HASH, + L2BlockNew, + type 
L2BlockSink, + type L2Tips, + type ValidateCheckpointResult, +} from '@aztec/stdlib/block'; +import { PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; +import { + type L1RollupConstants, + getEpochNumberAtTimestamp, + getSlotAtTimestamp, + getSlotRangeForEpoch, + getTimestampRangeForEpoch, +} from '@aztec/stdlib/epoch-helpers'; +import { type TelemetryClient, type Traceable, type Tracer, trackSpan } from '@aztec/telemetry-client'; + +import { type ArchiverConfig, mapArchiverConfig } from './config.js'; +import { NoBlobBodiesFoundError } from './errors.js'; +import { validateAndLogTraceAvailability } from './l1/validate_trace.js'; +import { ArchiverDataSourceBase } from './modules/data_source_base.js'; +import { ArchiverDataStoreUpdater } from './modules/data_store_updater.js'; +import type { ArchiverInstrumentation } from './modules/instrumentation.js'; +import type { ArchiverL1Synchronizer } from './modules/l1_synchronizer.js'; +import type { KVArchiverDataStore } from './store/kv_archiver_store.js'; + +/** Export ArchiverEmitter for use in factory and tests. */ +export type { ArchiverEmitter }; + +/** Request to add a block to the archiver, queued for processing by the sync loop. */ +type AddBlockRequest = { + block: L2BlockNew; + resolve: () => void; + reject: (err: Error) => void; +}; + +export type ArchiverDeps = { + telemetry?: TelemetryClient; + blobClient: BlobClientInterface; + epochCache?: EpochCache; + dateProvider?: DateProvider; +}; + +/** + * Pulls checkpoints in a non-blocking manner and provides interface for their retrieval. + * Responsible for handling robust L1 polling so that other components do not need to + * concern themselves with it. + */ +export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Traceable { + /** Event emitter for archiver events (L2BlockProven, L2PruneDetected, etc). */ + public readonly events: ArchiverEmitter; + + /** A loop in which we will be continually fetching new checkpoints. 
*/ + private runningPromise: RunningPromise; + + /** L1 synchronizer that handles fetching checkpoints and messages from L1. */ + private readonly synchronizer: ArchiverL1Synchronizer; + + private initialSyncComplete: boolean = false; + private initialSyncPromise: PromiseWithResolvers; + + /** Queue of blocks to be added to the store, processed by the sync loop. */ + private blockQueue: AddBlockRequest[] = []; + + /** Helper to handle updates to the store */ + private readonly updater: ArchiverDataStoreUpdater; + + public readonly tracer: Tracer; + + /** + * Creates a new instance of the Archiver. + * @param publicClient - A client for interacting with the Ethereum node. + * @param debugClient - A client for interacting with the Ethereum node for debug/trace methods. + * @param rollup - Rollup contract instance. + * @param inbox - Inbox contract instance. + * @param l1Addresses - L1 contract addresses (registry, governance proposer, slash factory, slashing proposer). + * @param dataStore - An archiver data store for storage & retrieval of blocks, encrypted logs & contract data. + * @param config - Archiver configuration options. + * @param blobClient - Client for retrieving blob data. + * @param epochCache - Cache for epoch-related data. + * @param dateProvider - Provider for current date/time. + * @param instrumentation - Instrumentation for metrics and tracing. + * @param l1Constants - L1 rollup constants. + * @param log - A logger. 
+ */ + constructor( + private readonly publicClient: ViemPublicClient, + private readonly debugClient: ViemPublicDebugClient, + private readonly rollup: RollupContract, + private readonly l1Addresses: Pick< + L1ContractAddresses, + 'registryAddress' | 'governanceProposerAddress' | 'slashFactoryAddress' + > & { slashingProposerAddress: EthAddress }, + readonly dataStore: KVArchiverDataStore, + private config: { + pollingIntervalMs: number; + batchSize: number; + skipValidateCheckpointAttestations?: boolean; + maxAllowedEthClientDriftSeconds: number; + ethereumAllowNoDebugHosts?: boolean; + }, + private readonly blobClient: BlobClientInterface, + instrumentation: ArchiverInstrumentation, + protected override readonly l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr }, + synchronizer: ArchiverL1Synchronizer, + events: ArchiverEmitter, + private readonly log: Logger = createLogger('archiver'), + ) { + super(dataStore, l1Constants); + + this.tracer = instrumentation.tracer; + this.initialSyncPromise = promiseWithResolvers(); + this.synchronizer = synchronizer; + this.events = events; + this.updater = new ArchiverDataStoreUpdater(this.dataStore); + + // Running promise starts with a small interval inbetween runs, so all iterations needed for the initial sync + // are done as fast as possible. This then gets updated once the initial sync completes. + this.runningPromise = new RunningPromise( + () => this.sync(), + this.log, + this.config.pollingIntervalMs / 10, + makeLoggingErrorHandler(this.log, NoBlobBodiesFoundError, BlockTagTooOldError), + ); + } + + /** Updates archiver config */ + public updateConfig(newConfig: Partial) { + this.config = merge(this.config, mapArchiverConfig(newConfig)); + } + + /** + * Starts sync process. + * @param blockUntilSynced - If true, blocks until the archiver has fully synced. 
+ */ + public async start(blockUntilSynced: boolean): Promise { + if (this.runningPromise.isRunning()) { + throw new Error('Archiver is already running'); + } + + await this.blobClient.testSources(); + await this.synchronizer.testEthereumNodeSynced(); + await validateAndLogTraceAvailability(this.debugClient, this.config.ethereumAllowNoDebugHosts ?? false); + + // Log initial state for the archiver + const { l1StartBlock } = this.l1Constants; + const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = l1StartBlock } = await this.store.getSynchPoint(); + const currentL2Checkpoint = await this.getSynchedCheckpointNumber(); + this.log.info( + `Starting archiver sync to rollup contract ${this.rollup.address} from L1 block ${blocksSynchedTo} and L2 checkpoint ${currentL2Checkpoint}`, + { blocksSynchedTo, messagesSynchedTo, currentL2Checkpoint }, + ); + + // Start sync loop, and return the wait for initial sync if we are asked to block until synced + this.runningPromise.start(); + if (blockUntilSynced) { + return this.waitForInitialSync(); + } + } + + public syncImmediate() { + return this.runningPromise.trigger(); + } + + /** + * Queues a block to be added to the archiver store and triggers processing. + * The block will be processed by the sync loop. + * Implements the L2BlockSink interface. + * @param block - The L2 block to add. + * @returns A promise that resolves when the block has been added to the store, or rejects on error. + */ + public addBlock(block: L2BlockNew): Promise { + return new Promise((resolve, reject) => { + this.blockQueue.push({ block, resolve, reject }); + this.log.debug(`Queued block ${block.number} for processing`); + // Trigger an immediate sync, but don't wait for it - the promise resolves when the block is processed + this.syncImmediate().catch(err => { + this.log.error(`Sync immediate call failed: ${err}`); + }); + }); + } + + /** + * Processes all queued blocks, adding them to the store. + * Called at the beginning of each sync iteration. 
+ * Blocks are processed in the order they were queued. + */ + private async processQueuedBlocks(): Promise { + if (this.blockQueue.length === 0) { + return; + } + + // Take all blocks from the queue + const queuedItems = this.blockQueue.splice(0, this.blockQueue.length); + this.log.debug(`Processing ${queuedItems.length} queued block(s)`); + + // Process each block individually to properly resolve/reject each promise + for (const { block, resolve, reject } of queuedItems) { + try { + await this.updater.addBlocksWithContractData([block]); + this.log.debug(`Added block ${block.number} to store`); + resolve(); + } catch (err: any) { + this.log.error(`Failed to add block ${block.number} to store: ${err.message}`); + reject(err); + } + } + } + + public waitForInitialSync() { + return this.initialSyncPromise.promise; + } + + /** + * Fetches logs from L1 contracts and processes them. + */ + @trackSpan('Archiver.sync') + private async sync() { + // Process any queued blocks first, before doing L1 sync + await this.processQueuedBlocks(); + // Now perform L1 sync + await this.syncFromL1(); + } + + private async syncFromL1() { + // Delegate to the L1 synchronizer + await this.synchronizer.syncFromL1(this.initialSyncComplete); + + // Check if we've completed initial sync + const currentL1BlockNumber = this.synchronizer.getL1BlockNumber(); + if (currentL1BlockNumber !== undefined && !this.initialSyncComplete) { + const l1BlockNumberAtEnd = await this.publicClient.getBlockNumber(); + if (currentL1BlockNumber + 1n >= l1BlockNumberAtEnd) { + this.log.info(`Initial archiver sync to L1 block ${currentL1BlockNumber} complete`, { + l1BlockNumber: currentL1BlockNumber, + syncPoint: await this.store.getSynchPoint(), + ...(await this.getL2Tips()), + }); + this.runningPromise.setPollingIntervalMS(this.config.pollingIntervalMs); + this.initialSyncComplete = true; + this.initialSyncPromise.resolve(); + } + } + } + + /** Resumes the archiver after a stop. 
*/ + public resume() { + if (this.runningPromise.isRunning()) { + this.log.warn(`Archiver already running`); + } + this.log.info(`Restarting archiver`); + this.runningPromise.start(); + } + + /** + * Stops the archiver. + * @returns A promise signalling completion of the stop process. + */ + public async stop(): Promise { + this.log.debug('Stopping...'); + await this.runningPromise.stop(); + + this.log.info('Stopped.'); + return Promise.resolve(); + } + + public backupTo(destPath: string): Promise { + return this.dataStore.backupTo(destPath); + } + + public getL1Constants(): Promise { + return Promise.resolve(this.l1Constants); + } + + public getGenesisValues(): Promise<{ genesisArchiveRoot: Fr }> { + return Promise.resolve({ genesisArchiveRoot: this.l1Constants.genesisArchiveRoot }); + } + + public getRollupAddress(): Promise { + return Promise.resolve(EthAddress.fromString(this.rollup.address)); + } + + public getRegistryAddress(): Promise { + return Promise.resolve(this.l1Addresses.registryAddress); + } + + public getL1BlockNumber(): bigint | undefined { + return this.synchronizer.getL1BlockNumber(); + } + + public getL1Timestamp(): Promise { + return Promise.resolve(this.synchronizer.getL1Timestamp()); + } + + public getL2SlotNumber(): Promise { + const l1Timestamp = this.synchronizer.getL1Timestamp(); + return Promise.resolve(l1Timestamp === undefined ? undefined : getSlotAtTimestamp(l1Timestamp, this.l1Constants)); + } + + public getL2EpochNumber(): Promise { + const l1Timestamp = this.synchronizer.getL1Timestamp(); + return Promise.resolve( + l1Timestamp === undefined ? undefined : getEpochNumberAtTimestamp(l1Timestamp, this.l1Constants), + ); + } + + public async isEpochComplete(epochNumber: EpochNumber): Promise { + // The epoch is complete if the current L2 block is the last one in the epoch (or later) + const header = await this.getBlockHeader('latest'); + const slot = header ? 
header.globalVariables.slotNumber : undefined; + const [_startSlot, endSlot] = getSlotRangeForEpoch(epochNumber, this.l1Constants); + if (slot && slot >= endSlot) { + return true; + } + + // If we haven't run an initial sync, just return false. + const l1Timestamp = this.synchronizer.getL1Timestamp(); + if (l1Timestamp === undefined) { + return false; + } + + // If not, the epoch may also be complete if the L2 slot has passed without a block + // We compute this based on the end timestamp for the given epoch and the timestamp of the last L1 block + const [_startTimestamp, endTimestamp] = getTimestampRangeForEpoch(epochNumber, this.l1Constants); + + // For this computation, we throw in a few extra seconds just for good measure, + // since we know the next L1 block won't be mined within this range. Remember that + // l1timestamp is the timestamp of the last l1 block we've seen, so this relies on + // the fact that L1 won't mine two blocks within this time of each other. + // TODO(palla/reorg): Is the above a safe assumption? + const leeway = 1n; + return l1Timestamp + leeway >= endTimestamp; + } + + /** Returns whether the archiver has completed an initial sync run successfully. 
*/ + public isInitialSyncComplete(): boolean { + return this.initialSyncComplete; + } + + public unwindCheckpoints(from: CheckpointNumber, checkpointsToUnwind: number): Promise { + return this.updater.unwindCheckpointsWithContractData(from, checkpointsToUnwind); + } + + public addCheckpoints( + checkpoints: PublishedCheckpoint[], + pendingChainValidationStatus?: ValidateCheckpointResult, + ): Promise { + return this.updater.addCheckpointsWithContractData(checkpoints, pendingChainValidationStatus); + } + + public async getL2Tips(): Promise { + const [latestBlockNumber, provenBlockNumber, checkpointedBlockNumber] = await Promise.all([ + this.getBlockNumber(), + this.getProvenBlockNumber(), + this.getCheckpointedBlockNumber(), + ] as const); + + // TODO(#13569): Compute proper finalized block number based on L1 finalized block. + // We just force it 2 epochs worth of proven data for now. + // NOTE: update end-to-end/src/e2e_epochs/epochs_empty_blocks.test.ts as that uses finalized blocks in computations + const finalizedBlockNumber = BlockNumber(Math.max(provenBlockNumber - this.l1Constants.epochDuration * 2, 0)); + + const beforeInitialblockNumber = BlockNumber(INITIAL_L2_BLOCK_NUM - 1); + + // Get the latest block header and checkpointed blocks for proven, finalised and checkpointed blocks + const [latestBlockHeader, provenCheckpointedBlock, finalizedCheckpointedBlock, checkpointedBlock] = + await Promise.all([ + latestBlockNumber > beforeInitialblockNumber ? this.getBlockHeader(latestBlockNumber) : undefined, + provenBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(provenBlockNumber) : undefined, + finalizedBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(finalizedBlockNumber) : undefined, + checkpointedBlockNumber > beforeInitialblockNumber + ? 
this.getCheckpointedBlock(checkpointedBlockNumber) + : undefined, + ] as const); + + if (latestBlockNumber > beforeInitialblockNumber && !latestBlockHeader) { + throw new Error(`Failed to retrieve latest block header for block ${latestBlockNumber}`); + } + + // Checkpointed blocks must exist for proven, finalized and checkpointed tips if they are beyond the initial block number. + if (checkpointedBlockNumber > beforeInitialblockNumber && !checkpointedBlock?.block.header) { + throw new Error( + `Failed to retrieve checkpointed block header for block ${checkpointedBlockNumber} (latest block is ${latestBlockNumber})`, + ); + } + + if (provenBlockNumber > beforeInitialblockNumber && !provenCheckpointedBlock?.block.header) { + throw new Error( + `Failed to retrieve proven checkpointed for block ${provenBlockNumber} (latest block is ${latestBlockNumber})`, + ); + } + + if (finalizedBlockNumber > beforeInitialblockNumber && !finalizedCheckpointedBlock?.block.header) { + throw new Error( + `Failed to retrieve finalized block header for block ${finalizedBlockNumber} (latest block is ${latestBlockNumber})`, + ); + } + + const latestBlockHeaderHash = (await latestBlockHeader?.hash()) ?? GENESIS_BLOCK_HEADER_HASH; + const provenBlockHeaderHash = (await provenCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH; + const finalizedBlockHeaderHash = + (await finalizedCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH; + const checkpointedBlockHeaderHash = (await checkpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH; + + // Now attempt to retrieve checkpoints for proven, finalised and checkpointed blocks + const [[provenBlockCheckpoint], [finalizedBlockCheckpoint], [checkpointedBlockCheckpoint]] = await Promise.all([ + provenCheckpointedBlock !== undefined + ? await this.getPublishedCheckpoints(provenCheckpointedBlock?.checkpointNumber, 1) + : [undefined], + finalizedCheckpointedBlock !== undefined + ? 
await this.getPublishedCheckpoints(finalizedCheckpointedBlock?.checkpointNumber, 1) + : [undefined], + checkpointedBlock !== undefined + ? await this.getPublishedCheckpoints(checkpointedBlock?.checkpointNumber, 1) + : [undefined], + ]); + + const initialcheckpointId: CheckpointId = { + number: CheckpointNumber.ZERO, + hash: GENESIS_CHECKPOINT_HEADER_HASH.toString(), + }; + + const makeCheckpointId = (checkpoint: PublishedCheckpoint | undefined) => { + if (checkpoint === undefined) { + return initialcheckpointId; + } + return { + number: checkpoint.checkpoint.number, + hash: checkpoint.checkpoint.hash().toString(), + }; + }; + + const l2Tips: L2Tips = { + proposed: { + number: latestBlockNumber, + hash: latestBlockHeaderHash.toString(), + }, + proven: { + block: { + number: provenBlockNumber, + hash: provenBlockHeaderHash.toString(), + }, + checkpoint: makeCheckpointId(provenBlockCheckpoint), + }, + finalized: { + block: { + number: finalizedBlockNumber, + hash: finalizedBlockHeaderHash.toString(), + }, + checkpoint: makeCheckpointId(finalizedBlockCheckpoint), + }, + checkpointed: { + block: { + number: checkpointedBlockNumber, + hash: checkpointedBlockHeaderHash.toString(), + }, + checkpoint: makeCheckpointId(checkpointedBlockCheckpoint), + }, + }; + + return l2Tips; + } + + public async rollbackTo(targetL2BlockNumber: BlockNumber): Promise { + // TODO(pw/mbps): This still assumes 1 block per checkpoint + const currentBlocks = await this.getL2Tips(); + const currentL2Block = currentBlocks.proposed.number; + const currentProvenBlock = currentBlocks.proven.block.number; + + if (targetL2BlockNumber >= currentL2Block) { + throw new Error(`Target L2 block ${targetL2BlockNumber} must be less than current L2 block ${currentL2Block}`); + } + const blocksToUnwind = currentL2Block - targetL2BlockNumber; + const targetL2Block = await this.store.getCheckpointedBlock(targetL2BlockNumber); + if (!targetL2Block) { + throw new Error(`Target L2 block ${targetL2BlockNumber} not 
found`); + } + const targetL1BlockNumber = targetL2Block.l1.blockNumber; + const targetCheckpointNumber = CheckpointNumber.fromBlockNumber(targetL2BlockNumber); + const targetL1Block = await this.publicClient.getBlock({ + blockNumber: targetL1BlockNumber, + includeTransactions: false, + }); + if (!targetL1Block) { + throw new Error(`Missing L1 block ${targetL1BlockNumber}`); + } + const targetL1BlockHash = Buffer32.fromString(targetL1Block.hash); + this.log.info(`Unwinding ${blocksToUnwind} checkpoints from L2 block ${currentL2Block}`); + await this.updater.unwindCheckpointsWithContractData(CheckpointNumber(currentL2Block), blocksToUnwind); + this.log.info(`Unwinding L1 to L2 messages to checkpoint ${targetCheckpointNumber}`); + await this.store.rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber); + this.log.info(`Setting L1 syncpoints to ${targetL1BlockNumber}`); + await this.store.setCheckpointSynchedL1BlockNumber(targetL1BlockNumber); + await this.store.setMessageSynchedL1Block({ l1BlockNumber: targetL1BlockNumber, l1BlockHash: targetL1BlockHash }); + if (targetL2BlockNumber < currentProvenBlock) { + this.log.info(`Clearing proven L2 block number`); + await this.store.setProvenCheckpointNumber(CheckpointNumber.ZERO); + } + // TODO(palla/reorg): Set the finalized block when we add support for it. 
+ // if (targetL2BlockNumber < currentFinalizedBlock) { + // this.log.info(`Clearing finalized L2 block number`); + // await this.store.setFinalizedL2BlockNumber(0); + // } + } +} diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts deleted file mode 100644 index 96b5d348e19e..000000000000 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ /dev/null @@ -1,1566 +0,0 @@ -import type { BlobClientInterface } from '@aztec/blob-client/client'; -import { type Blob, getBlobsPerL1Block, getPrefixedEthBlobCommitments } from '@aztec/blob-lib'; -import { GENESIS_ARCHIVE_ROOT } from '@aztec/constants'; -import type { EpochCache, EpochCommitteeInfo } from '@aztec/epoch-cache'; -import { DefaultL1ContractsConfig } from '@aztec/ethereum/config'; -import { InboxContract, MULTI_CALL_3_ADDRESS, RollupContract } from '@aztec/ethereum/contracts'; -import type { ViemPublicClient } from '@aztec/ethereum/types'; -import { BlockNumber, CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types'; -import { Buffer16, Buffer32 } from '@aztec/foundation/buffer'; -import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; -import { Fr } from '@aztec/foundation/curves/bn254'; -import { EthAddress } from '@aztec/foundation/eth-address'; -import { type Logger, createLogger } from '@aztec/foundation/log'; -import { retryUntil } from '@aztec/foundation/retry'; -import { bufferToHex } from '@aztec/foundation/string'; -import { TestDateProvider } from '@aztec/foundation/timer'; -import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; -import { type InboxAbi, RollupAbi } from '@aztec/l1-artifacts'; -import { CommitteeAttestation, CommitteeAttestationsAndSigners, L2BlockNew } from '@aztec/stdlib/block'; -import { Checkpoint } from '@aztec/stdlib/checkpoint'; -import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; -import { InboxLeaf } from '@aztec/stdlib/messaging'; 
-import { - makeAndSignCommitteeAttestationsAndSigners, - makeCheckpointAttestationFromCheckpoint, - makeStateReference, - mockCheckpointAndMessages, -} from '@aztec/stdlib/testing'; -import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees'; -import { getTelemetryClient } from '@aztec/telemetry-client'; - -import { type MockProxy, mock } from 'jest-mock-extended'; -import { type FormattedBlock, type Log, type Transaction, encodeFunctionData, multicall3Abi, toHex } from 'viem'; - -import { Archiver } from './archiver.js'; -import { InitialBlockNumberNotSequentialError } from './errors.js'; -import type { ArchiverInstrumentation } from './instrumentation.js'; -import { KVArchiverDataStore } from './kv_archiver_store/kv_archiver_store.js'; -import { updateRollingHash } from './structs/inbox_message.js'; - -interface MockRollupContractRead { - /** Returns the target committee size */ - getTargetCommitteeSize: () => Promise; - /** Returns the rollup version. */ - getVersion: () => Promise; - /** Given a checkpoint number, returns the archive. */ - archiveAt: (args: readonly [bigint]) => Promise<`0x${string}`>; - /** Given a checkpoint number, returns provenCheckpointNumber, provenArchive, pendingCheckpointNumber, pendingArchive, archiveForLocalPendingCheckpointNumber, provenEpochNumber. 
*/ - status: (args: readonly [bigint]) => Promise<[bigint, `0x${string}`, bigint, `0x${string}`, `0x${string}`]>; -} - -interface MockInboxContractRead { - getState: () => Promise<{ rollingHash: `0x${string}`; totalMessagesInserted: bigint; inProgress: bigint }>; -} - -interface MockRollupContractEvents { - CheckpointProposed: ( - filter: any, - range: { fromBlock: bigint; toBlock: bigint }, - ) => Promise[]>; -} - -interface MockInboxContractEvents { - MessageSent: ( - filter: any, - range: { fromBlock: bigint; toBlock: bigint }, - ) => Promise[]>; -} - -describe('Archiver', () => { - const rollupAddress = EthAddress.random(); - const inboxAddress = EthAddress.random(); - const registryAddress = EthAddress.random(); - const governanceProposerAddress = EthAddress.random(); - const slashFactoryAddress = EthAddress.random(); - const slashingProposerAddress = EthAddress.random(); - - const mockL1BlockNumbers = (...l1BlockNumbers: bigint[]) => { - // During each archiver sync, we read the block number 3 times, so this ensures all three reads are consistent across the run. - for (const blockNum of l1BlockNumbers) { - publicClient.getBlockNumber - .mockResolvedValueOnce(blockNum) - .mockResolvedValueOnce(blockNum) - .mockResolvedValueOnce(blockNum); - } - publicClient.getBlockNumber.mockResolvedValue(l1BlockNumbers.at(-1)!); - }; - - const makeCheckpointsAndMessages = async ( - numCheckpoints: number, - { - numBlocksPerCheckpoint = 1, - txsPerBlock = 4, - checkpointStartNumber = CheckpointNumber(1), - blockStartNumber = 1, - numL1ToL2Messages = 3, - maxEffects = 0, - previousArchive, - }: { - numBlocksPerCheckpoint?: number; - txsPerBlock?: number; - checkpointStartNumber?: CheckpointNumber; - blockStartNumber?: number; - numL1ToL2Messages?: number; - maxEffects?: number; - previousArchive?: AppendOnlyTreeSnapshot; - } = {}, - ) => { - // Create checkpoints sequentially to chain archive roots properly. 
- // Each checkpoint's first block's lastArchive must equal the previous block's archive. - const results: { checkpoint: Checkpoint; messages: Fr[]; lastArchive: AppendOnlyTreeSnapshot | undefined }[] = []; - let lastArchive = previousArchive; - for (let i = 0; i < numCheckpoints; i++) { - const checkpointNumber = CheckpointNumber(i + checkpointStartNumber); - const startBlockNumber = BlockNumber(i * numBlocksPerCheckpoint + blockStartNumber); - const endBlockNumber = BlockNumber(startBlockNumber + numBlocksPerCheckpoint - 1); - const result = await mockCheckpointAndMessages(checkpointNumber, { - startBlockNumber, - numBlocks: numBlocksPerCheckpoint, - txsPerBlock, - numL1ToL2Messages, - timestamp: BigInt(now + Number(ETHEREUM_SLOT_DURATION) * (endBlockNumber + 1)), - previousArchive: lastArchive, - makeBlockOptions: blockNumber => ({ - // State reference can't be random. The nextAvailableLeafIndex of the note hash tree must be big enough to - // avoid error when computing the dataStartIndexForBlock in LogStore. 
- state: makeStateReference(0x100), - timestamp: BigInt(now + Number(ETHEREUM_SLOT_DURATION) * (blockNumber + 1)), - txOptions: { - numPublicCallsPerTx: blockNumber + 1, - numPublicLogsPerCall: 2, - maxEffects, - }, - makeTxOptions: txIndex => ({ - numPrivateLogs: blockNumber + txIndex, - }), - }), - }); - lastArchive = result.lastArchive; - results.push(result); - } - return results; - }; - - let publicClient: MockProxy; - let debugClient: MockProxy; - let instrumentation: MockProxy; - let blobClient: MockProxy; - let epochCache: MockProxy; - let dateProvider: TestDateProvider; - let archiverStore: KVArchiverDataStore; - let l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr }; - let now: number; - - let mockRollupRead: MockProxy; - let mockInboxRead: MockProxy; - let mockRollupEvents: MockProxy; - let mockInboxEvents: MockProxy; - let mockRollup: { - read: typeof mockRollupRead; - getEvents: typeof mockRollupEvents; - address: string; - }; - let mockInbox: { - read: typeof mockInboxRead; - getEvents: typeof mockInboxEvents; - }; - let archiver: Archiver; - - let checkpoints: Checkpoint[]; - let messagesPerCheckpoint: Fr[][]; - let messagesRollingHash: Buffer16; - let totalMessagesInserted: number; - - let checkpointProposedLogs: Log[]; - let l2MessageSentLogs: Log[]; - - // Maps from block archive to the corresponding txs, versioned blob hashes, and blobs - // REFACTOR: we should have a single method that creates all these artifacts, as well as the l2 proposed event - let allRollupTxs: Map<`0x${string}`, Transaction>; - let allVersionedBlobHashes: Map<`0x${string}`, `0x${string}`[]>; - let allBlobs: Map<`0x${string}`, Blob[]>; - - let logger: Logger; - - const GENESIS_ROOT = new Fr(GENESIS_ARCHIVE_ROOT).toString(); - const ETHEREUM_SLOT_DURATION = BigInt(DefaultL1ContractsConfig.ethereumSlotDuration); - - beforeEach(async () => { - logger = createLogger('archiver:test'); - messagesRollingHash = Buffer16.ZERO; - 
totalMessagesInserted = 0; - dateProvider = new TestDateProvider(); - now = +new Date(); - publicClient = mock(); - publicClient.getChainId.mockResolvedValue(1); - // Default getBlockNumber mock - tests can override this with mockL1BlockNumbers() or their own mock - publicClient.getBlockNumber.mockResolvedValue(0n); - publicClient.getBlock.mockImplementation((async (args: { blockNumber?: bigint } = {}) => { - args.blockNumber ??= await publicClient.getBlockNumber(); - return { - number: args.blockNumber, - timestamp: BigInt(args.blockNumber) * ETHEREUM_SLOT_DURATION + BigInt(now), - hash: Buffer32.fromBigInt(BigInt(args.blockNumber)).toString(), - } as FormattedBlock; - }) as any); - - // Debug client uses the same mock as public client for tests - debugClient = publicClient; - - blobClient = mock(); - epochCache = mock(); - epochCache.getCommitteeForEpoch.mockResolvedValue({ committee: [] as EthAddress[] } as EpochCommitteeInfo); - - const tracer = getTelemetryClient().getTracer(''); - instrumentation = mock({ isEnabled: () => true, tracer }); - archiverStore = new KVArchiverDataStore(await openTmpStore('archiver_test'), 1000); - l1Constants = { - l1GenesisTime: BigInt(now), - l1StartBlock: 0n, - l1StartBlockHash: Buffer32.random(), - epochDuration: 4, - slotDuration: 24, - ethereumSlotDuration: 12, - proofSubmissionEpochs: 1, - genesisArchiveRoot: new Fr(GENESIS_ARCHIVE_ROOT), - }; - - // Initialize global collections first - l2MessageSentLogs = []; - checkpointProposedLogs = []; - allRollupTxs = new Map(); - allVersionedBlobHashes = new Map(); - allBlobs = new Map(); - - publicClient.getTransaction.mockImplementation((args: { hash?: `0x${string}` }) => - Promise.resolve(args.hash ? 
(allRollupTxs.get(args.hash) as any) : undefined), - ); - - blobClient.getBlobSidecar.mockImplementation((blockId: `0x${string}`, _requestedBlobHashes?: Buffer[]) => - Promise.resolve(allBlobs.get(blockId) || []), - ); - - // Create mock rollup contract - mockRollupRead = mock(); - mockRollupRead.archiveAt.mockImplementation((args: readonly [bigint]) => - Promise.resolve(checkpoints[Number(args[0] - 1n)]?.archive.root.toString() ?? Fr.ZERO.toString()), - ); - mockRollupRead.getVersion.mockImplementation(() => Promise.resolve(1n)); - mockRollupEvents = mock(); - mockRollupEvents.CheckpointProposed.mockImplementation((_filter: any, { fromBlock, toBlock }) => - Promise.resolve( - checkpointProposedLogs.filter(log => log.blockNumber! >= fromBlock && log.blockNumber! <= toBlock), - ), - ); - mockRollup = { - read: mockRollupRead, - getEvents: mockRollupEvents, - address: rollupAddress.toString(), - }; - const rollupWrapper = new RollupContract(publicClient, rollupAddress.toString()); - (rollupWrapper as any).rollup = mockRollup; - - // Create mock inbox contract - mockInboxRead = mock(); - mockInboxRead.getState.mockImplementation(() => - Promise.resolve({ - rollingHash: messagesRollingHash.toString(), - totalMessagesInserted: BigInt(totalMessagesInserted), - inProgress: 0n, - }), - ); - mockInboxEvents = mock(); - mockInboxEvents.MessageSent.mockImplementation( - (filter: { hash?: string }, opts: { fromBlock?: bigint; toBlock?: bigint } = {}) => - Promise.resolve( - l2MessageSentLogs.filter( - log => - (!filter.hash || log.args.hash === filter.hash) && - (!opts.fromBlock || log.blockNumber! >= opts.fromBlock) && - (!opts.toBlock || log.blockNumber! 
<= opts.toBlock), - ), - ), - ); - mockInbox = { - read: mockInboxRead, - getEvents: mockInboxEvents, - }; - const inboxWrapper = new InboxContract(publicClient, inboxAddress.toString()); - (inboxWrapper as any).inbox = mockInbox; - - const contractAddresses = { - registryAddress, - governanceProposerAddress, - slashFactoryAddress, - slashingProposerAddress, - }; - - archiver = new Archiver( - publicClient, - debugClient, - rollupWrapper, - inboxWrapper, - contractAddresses, - archiverStore, - { - pollingIntervalMs: 1000, - batchSize: 1000, - maxAllowedEthClientDriftSeconds: 300, - ethereumAllowNoDebugHosts: true, - }, - blobClient, - epochCache, - dateProvider, - instrumentation, - l1Constants, - ); - - // Create checkpoints starting from the genesis archive root so that archive roots chain correctly - const genesisArchive = new AppendOnlyTreeSnapshot(new Fr(GENESIS_ARCHIVE_ROOT), 1); - ({ checkpoints, messagesPerCheckpoint } = ( - await makeCheckpointsAndMessages(3, { previousArchive: genesisArchive }) - ).reduce( - (acc, { checkpoint, messages, lastArchive: la }) => { - acc.checkpoints.push(checkpoint); - acc.messagesPerCheckpoint.push(messages); - acc.lastArchive = la; - return acc; - }, - { checkpoints: [], messagesPerCheckpoint: [], lastArchive: undefined } as { - checkpoints: Checkpoint[]; - messagesPerCheckpoint: Fr[][]; - lastArchive: AppendOnlyTreeSnapshot | undefined; - }, - )); - }); - - afterEach(async () => { - await archiver?.stop(); - }); - - describe('getPublishedCheckpoints', () => { - it('returns published checkpoints with full checkpoint data', async () => { - const rollupTxs = checkpoints.map(c => makeRollupTx(c)); - const blobHashes = checkpoints.map(makeVersionedBlobHashes); - - mockL1BlockNumbers(100n); - - mockRollup.read.status.mockResolvedValue([ - 0n, - GENESIS_ROOT, - 3n, - checkpoints[2].archive.root.toString(), - GENESIS_ROOT, - ]); - - checkpoints.forEach((c, i) => - makeCheckpointProposedEvent(70n + BigInt(i) * 10n, c.number, 
c.archive.root.toString(), blobHashes[i]), - ); - messagesPerCheckpoint.forEach((messages, i) => - makeMessageSentEvents(60n + BigInt(i) * 5n, checkpoints[i].number, messages), - ); - mockInbox.read.getState.mockResolvedValue( - makeInboxStateFromMsgCount(messagesPerCheckpoint.reduce((acc, curr) => acc + curr.length, 0)), - ); - - rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); - const blobsFromCheckpoints = checkpoints.map(c => makeBlobsFromCheckpoint(c)); - blobsFromCheckpoints.forEach(blobs => blobClient.getBlobSidecar.mockResolvedValueOnce(blobs)); - - await archiver.start(false); - await waitUntilArchiverCheckpoint(CheckpointNumber(3)); - - // Get all checkpoints starting from 1 - const publishedCheckpoints = await archiver.getPublishedCheckpoints(CheckpointNumber(1), 10); - expect(publishedCheckpoints.length).toBe(3); - expect(publishedCheckpoints.map(c => c.checkpoint.number)).toEqual([1, 2, 3]); - - // Each checkpoint should have blocks - publishedCheckpoints.forEach((pc, i) => { - expect(pc.checkpoint.blocks.length).toBeGreaterThan(0); - expect(pc.checkpoint.archive.root.toString()).toEqual(checkpoints[i].archive.root.toString()); - expect(pc.l1).toBeDefined(); - }); - }, 10_000); - - it('respects the limit parameter', async () => { - const rollupTxs = checkpoints.map(c => makeRollupTx(c)); - const blobHashes = checkpoints.map(makeVersionedBlobHashes); - - mockL1BlockNumbers(100n); - - mockRollup.read.status.mockResolvedValue([ - 0n, - GENESIS_ROOT, - 3n, - checkpoints[2].archive.root.toString(), - GENESIS_ROOT, - ]); - - checkpoints.forEach((c, i) => - makeCheckpointProposedEvent(70n + BigInt(i) * 10n, c.number, c.archive.root.toString(), blobHashes[i]), - ); - messagesPerCheckpoint.forEach((messages, i) => - makeMessageSentEvents(60n + BigInt(i) * 5n, checkpoints[i].number, messages), - ); - mockInbox.read.getState.mockResolvedValue( - makeInboxStateFromMsgCount(messagesPerCheckpoint.reduce((acc, curr) => acc + 
curr.length, 0)), - ); - - rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); - const blobsFromCheckpoints = checkpoints.map(c => makeBlobsFromCheckpoint(c)); - blobsFromCheckpoints.forEach(blobs => blobClient.getBlobSidecar.mockResolvedValueOnce(blobs)); - - await archiver.start(false); - await waitUntilArchiverCheckpoint(CheckpointNumber(3)); - - // Get only 2 checkpoints starting from 1 - const publishedCheckpoints = await archiver.getPublishedCheckpoints(CheckpointNumber(1), 2); - expect(publishedCheckpoints.length).toBe(2); - expect(publishedCheckpoints.map(c => c.checkpoint.number)).toEqual([1, 2]); - }, 10_000); - - it('respects the starting checkpoint number', async () => { - const rollupTxs = checkpoints.map(c => makeRollupTx(c)); - const blobHashes = checkpoints.map(makeVersionedBlobHashes); - - mockL1BlockNumbers(100n); - - mockRollup.read.status.mockResolvedValue([ - 0n, - GENESIS_ROOT, - 3n, - checkpoints[2].archive.root.toString(), - GENESIS_ROOT, - ]); - - checkpoints.forEach((c, i) => - makeCheckpointProposedEvent(70n + BigInt(i) * 10n, c.number, c.archive.root.toString(), blobHashes[i]), - ); - messagesPerCheckpoint.forEach((messages, i) => - makeMessageSentEvents(60n + BigInt(i) * 5n, checkpoints[i].number, messages), - ); - mockInbox.read.getState.mockResolvedValue( - makeInboxStateFromMsgCount(messagesPerCheckpoint.reduce((acc, curr) => acc + curr.length, 0)), - ); - - rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); - const blobsFromCheckpoints = checkpoints.map(c => makeBlobsFromCheckpoint(c)); - blobsFromCheckpoints.forEach(blobs => blobClient.getBlobSidecar.mockResolvedValueOnce(blobs)); - - await archiver.start(false); - await waitUntilArchiverCheckpoint(CheckpointNumber(3)); - - // Get checkpoints starting from 2 - const publishedCheckpoints = await archiver.getPublishedCheckpoints(CheckpointNumber(2), 10); - expect(publishedCheckpoints.length).toBe(2); - 
expect(publishedCheckpoints.map(c => c.checkpoint.number)).toEqual([2, 3]); - }, 10_000); - - it('returns empty array when no checkpoints exist', async () => { - mockL1BlockNumbers(100n); - mockRollup.read.status.mockResolvedValue([0n, GENESIS_ROOT, 0n, GENESIS_ROOT, GENESIS_ROOT]); - mockInbox.read.getState.mockResolvedValue(makeInboxStateFromMsgCount(0)); - - await archiver.start(false); - - const publishedCheckpoints = await archiver.getPublishedCheckpoints(CheckpointNumber(1), 10); - expect(publishedCheckpoints).toEqual([]); - }, 10_000); - }); - - describe('getCheckpointsForEpoch', () => { - it('returns checkpoints for a specific epoch based on slot numbers', async () => { - // l1Constants has epochDuration: 4, so epoch 0 has slots 0-3 - // We'll create checkpoints with specific slot numbers to test filtering - - // Create checkpoints with specific slots, chaining archive roots - const [{ checkpoint: cp1, messages: msgs1, lastArchive: archive1 }] = await makeCheckpointsAndMessages(1, { - checkpointStartNumber: CheckpointNumber(1), - blockStartNumber: 1, - }); - cp1.header.slotNumber = SlotNumber(1); // Epoch 0 - - const [{ checkpoint: cp2, messages: msgs2, lastArchive: archive2 }] = await makeCheckpointsAndMessages(1, { - checkpointStartNumber: CheckpointNumber(2), - blockStartNumber: 2, - previousArchive: archive1, - }); - cp2.header.slotNumber = SlotNumber(3); // Epoch 0 - - const [{ checkpoint: cp3, messages: msgs3 }] = await makeCheckpointsAndMessages(1, { - checkpointStartNumber: CheckpointNumber(3), - blockStartNumber: 3, - previousArchive: archive2, - }); - cp3.header.slotNumber = SlotNumber(5); // Epoch 1 - - checkpoints = [cp1, cp2, cp3]; - messagesPerCheckpoint = [msgs1, msgs2, msgs3]; - - const rollupTxs = checkpoints.map(c => makeRollupTx(c)); - const blobHashes = checkpoints.map(makeVersionedBlobHashes); - - mockL1BlockNumbers(100n); - - mockRollup.read.status.mockResolvedValue([ - 0n, - GENESIS_ROOT, - 3n, - 
checkpoints[2].archive.root.toString(), - GENESIS_ROOT, - ]); - - checkpoints.forEach((c, i) => - makeCheckpointProposedEvent(70n + BigInt(i) * 10n, c.number, c.archive.root.toString(), blobHashes[i]), - ); - messagesPerCheckpoint.forEach((messages, i) => - makeMessageSentEvents(60n + BigInt(i) * 5n, checkpoints[i].number, messages), - ); - mockInbox.read.getState.mockResolvedValue( - makeInboxStateFromMsgCount(messagesPerCheckpoint.reduce((acc, curr) => acc + curr.length, 0)), - ); - - rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); - const blobsFromCheckpoints = checkpoints.map(c => makeBlobsFromCheckpoint(c)); - blobsFromCheckpoints.forEach(blobs => blobClient.getBlobSidecar.mockResolvedValueOnce(blobs)); - - await archiver.start(false); - await waitUntilArchiverCheckpoint(CheckpointNumber(3)); - - // Get checkpoints for epoch 0 (slots 0-3) - const epoch0Checkpoints = await archiver.getCheckpointsForEpoch(EpochNumber(0)); - expect(epoch0Checkpoints.length).toBe(2); - expect(epoch0Checkpoints.map(c => c.number)).toEqual([1, 2]); - - // Get checkpoints for epoch 1 (slots 4-7) - const epoch1Checkpoints = await archiver.getCheckpointsForEpoch(EpochNumber(1)); - expect(epoch1Checkpoints.length).toBe(1); - expect(epoch1Checkpoints.map(c => c.number)).toEqual([3]); - }, 10_000); - - it('returns empty array for epoch with no checkpoints', async () => { - // Create a checkpoint in epoch 0 - const [{ checkpoint: cp1, messages: msgs1 }] = await makeCheckpointsAndMessages(1, { - checkpointStartNumber: CheckpointNumber(1), - blockStartNumber: 1, - }); - cp1.header.slotNumber = SlotNumber(2); // Epoch 0 - - checkpoints = [cp1]; - messagesPerCheckpoint = [msgs1]; - - const rollupTxs = checkpoints.map(c => makeRollupTx(c)); - const blobHashes = checkpoints.map(makeVersionedBlobHashes); - - mockL1BlockNumbers(100n); - - mockRollup.read.status.mockResolvedValue([ - 0n, - GENESIS_ROOT, - 1n, - checkpoints[0].archive.root.toString(), - GENESIS_ROOT, 
- ]); - - makeCheckpointProposedEvent(70n, checkpoints[0].number, checkpoints[0].archive.root.toString(), blobHashes[0]); - makeMessageSentEvents(60n, checkpoints[0].number, messagesPerCheckpoint[0]); - mockInbox.read.getState.mockResolvedValue(makeInboxStateFromMsgCount(messagesPerCheckpoint[0].length)); - - rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); - const blobsFromCheckpoints = checkpoints.map(c => makeBlobsFromCheckpoint(c)); - blobsFromCheckpoints.forEach(blobs => blobClient.getBlobSidecar.mockResolvedValueOnce(blobs)); - - await archiver.start(false); - await waitUntilArchiverCheckpoint(CheckpointNumber(1)); - - // Get checkpoints for epoch 1 (slots 4-7) - should be empty - const epoch1Checkpoints = await archiver.getCheckpointsForEpoch(EpochNumber(1)); - expect(epoch1Checkpoints).toEqual([]); - }, 10_000); - - it('returns checkpoints in correct order (ascending by checkpoint number)', async () => { - // Create multiple checkpoints in the same epoch, chaining archive roots - const [{ checkpoint: cp1, messages: msgs1, lastArchive: archive1 }] = await makeCheckpointsAndMessages(1, { - checkpointStartNumber: CheckpointNumber(1), - blockStartNumber: 1, - }); - cp1.header.slotNumber = SlotNumber(0); // Epoch 0 - - const [{ checkpoint: cp2, messages: msgs2, lastArchive: archive2 }] = await makeCheckpointsAndMessages(1, { - checkpointStartNumber: CheckpointNumber(2), - blockStartNumber: 2, - previousArchive: archive1, - }); - cp2.header.slotNumber = SlotNumber(1); // Epoch 0 - - const [{ checkpoint: cp3, messages: msgs3 }] = await makeCheckpointsAndMessages(1, { - checkpointStartNumber: CheckpointNumber(3), - blockStartNumber: 3, - previousArchive: archive2, - }); - cp3.header.slotNumber = SlotNumber(2); // Epoch 0 - - checkpoints = [cp1, cp2, cp3]; - messagesPerCheckpoint = [msgs1, msgs2, msgs3]; - - const rollupTxs = checkpoints.map(c => makeRollupTx(c)); - const blobHashes = checkpoints.map(makeVersionedBlobHashes); - - 
mockL1BlockNumbers(100n); - - mockRollup.read.status.mockResolvedValue([ - 0n, - GENESIS_ROOT, - 3n, - checkpoints[2].archive.root.toString(), - GENESIS_ROOT, - ]); - - checkpoints.forEach((c, i) => - makeCheckpointProposedEvent(70n + BigInt(i) * 10n, c.number, c.archive.root.toString(), blobHashes[i]), - ); - messagesPerCheckpoint.forEach((messages, i) => - makeMessageSentEvents(60n + BigInt(i) * 5n, checkpoints[i].number, messages), - ); - mockInbox.read.getState.mockResolvedValue( - makeInboxStateFromMsgCount(messagesPerCheckpoint.reduce((acc, curr) => acc + curr.length, 0)), - ); - - rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); - const blobsFromCheckpoints = checkpoints.map(c => makeBlobsFromCheckpoint(c)); - blobsFromCheckpoints.forEach(blobs => blobClient.getBlobSidecar.mockResolvedValueOnce(blobs)); - - await archiver.start(false); - await waitUntilArchiverCheckpoint(CheckpointNumber(3)); - - // Get checkpoints for epoch 0 - should be in ascending order - const epoch0Checkpoints = await archiver.getCheckpointsForEpoch(EpochNumber(0)); - expect(epoch0Checkpoints.length).toBe(3); - expect(epoch0Checkpoints.map(c => c.number)).toEqual([1, 2, 3]); - }, 10_000); - }); - - describe('addBlock (L2BlockSink)', () => { - // State reference needs to be valid for LogStore's dataStartIndexForBlock calculation - // All blocks use checkpoint number 1 since they're being added to the initial checkpoint - const makeBlock = (blockNumber: BlockNumber, indexIntoCheckpoint = 0, previousArchive?: AppendOnlyTreeSnapshot) => - L2BlockNew.random(blockNumber, { - checkpointNumber: CheckpointNumber(1), - state: makeStateReference(0x100), - indexWithinCheckpoint: indexIntoCheckpoint, - ...(previousArchive ? 
{ lastArchive: previousArchive } : {}), - }); - - // Genesis archive for the first block - const genesisArchive = new AppendOnlyTreeSnapshot(new Fr(GENESIS_ARCHIVE_ROOT), 1); - - // Setup minimal L1 mocks needed for sync loop to run - const setupMinimalL1Mocks = () => { - // Use mockResolvedValue (not mockL1BlockNumbers) so it can handle unlimited sync iterations - publicClient.getBlockNumber.mockResolvedValue(100n); - mockRollup.read.status.mockResolvedValue([0n, GENESIS_ROOT, 0n, GENESIS_ROOT, GENESIS_ROOT]); - mockInbox.read.getState.mockResolvedValue(makeInboxStateFromMsgCount(0)); - }; - - it('adds a block to the store', async () => { - setupMinimalL1Mocks(); - const block = await makeBlock(BlockNumber(1), 0, genesisArchive); - await archiver.addBlock(block); - - const retrievedBlock = await archiver.getL2BlockNew(BlockNumber(1)); - expect(retrievedBlock).toBeDefined(); - expect(retrievedBlock!.number).toEqual(BlockNumber(1)); - expect((await retrievedBlock!.header.hash()).toString()).toEqual((await block.header.hash()).toString()); - }); - - it('adds multiple blocks incrementally', async () => { - setupMinimalL1Mocks(); - const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); - const block2 = await makeBlock(BlockNumber(2), 1, block1.archive); - const block3 = await makeBlock(BlockNumber(3), 2, block2.archive); - - await archiver.addBlock(block1); - await archiver.addBlock(block2); - await archiver.addBlock(block3); - - const retrievedBlock1 = await archiver.getL2BlockNew(BlockNumber(1)); - const retrievedBlock2 = await archiver.getL2BlockNew(BlockNumber(2)); - const retrievedBlock3 = await archiver.getL2BlockNew(BlockNumber(3)); - - expect(retrievedBlock1!.number).toEqual(BlockNumber(1)); - expect(retrievedBlock2!.number).toEqual(BlockNumber(2)); - expect(retrievedBlock3!.number).toEqual(BlockNumber(3)); - }); - - it('rejects blocks with non-incremental block number (gap)', async () => { - setupMinimalL1Mocks(); - const block1 = await 
makeBlock(BlockNumber(1), 0, genesisArchive); - const block3 = await makeBlock(BlockNumber(3), 2, block1.archive); // Skip block 2 - - await archiver.addBlock(block1); - - // Block 3 should be rejected because block 2 is missing - await expect(archiver.addBlock(block3)).rejects.toThrow(InitialBlockNumberNotSequentialError); - }); - - it('rejects blocks with duplicate block numbers', async () => { - setupMinimalL1Mocks(); - const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); - const block2 = await makeBlock(BlockNumber(2), 1, block1.archive); - - await archiver.addBlock(block1); - await archiver.addBlock(block2); - - // Adding block 2 again shoud be rejected - await expect(archiver.addBlock(block2)).rejects.toThrow(InitialBlockNumberNotSequentialError); - }); - - it('rejects first block if not starting from block 1', async () => { - setupMinimalL1Mocks(); - - const block5 = await makeBlock(BlockNumber(5), 0, genesisArchive); - - // First block must be block 1 - await expect(archiver.addBlock(block5)).rejects.toThrow(); - }); - - it('allows block number to start from 1 (initial block)', async () => { - setupMinimalL1Mocks(); - const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); - - await archiver.addBlock(block1); - - const retrievedBlock = await archiver.getL2BlockNew(BlockNumber(1)); - expect(retrievedBlock).toBeDefined(); - expect(retrievedBlock!.number).toEqual(BlockNumber(1)); - }); - - it('retrieves multiple blocks with getL2BlocksNew', async () => { - setupMinimalL1Mocks(); - const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); - const block2 = await makeBlock(BlockNumber(2), 1, block1.archive); - const block3 = await makeBlock(BlockNumber(3), 2, block2.archive); - - await archiver.addBlock(block1); - await archiver.addBlock(block2); - await archiver.addBlock(block3); - - const blocks = await archiver.getL2BlocksNew(BlockNumber(1), 3); - expect(blocks.length).toEqual(3); - expect(await blocks[0].hash()).toEqual(await 
block1.hash()); - expect(await blocks[1].hash()).toEqual(await block2.hash()); - expect(await blocks[2].hash()).toEqual(await block3.hash()); - }); - - it('retrieves blocks with limit in getL2BlocksNew', async () => { - setupMinimalL1Mocks(); - const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); - const block2 = await makeBlock(BlockNumber(2), 1, block1.archive); - const block3 = await makeBlock(BlockNumber(3), 2, block2.archive); - - await archiver.addBlock(block1); - await archiver.addBlock(block2); - await archiver.addBlock(block3); - - // Request only 2 blocks starting from block 1 - const blocks = await archiver.getL2BlocksNew(BlockNumber(1), 2); - expect(blocks.length).toEqual(2); - expect(await blocks[0].hash()).toEqual(await block1.hash()); - expect(await blocks[1].hash()).toEqual(await block2.hash()); - }); - - it('retrieves blocks starting from middle with getL2BlocksNew', async () => { - setupMinimalL1Mocks(); - const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); - const block2 = await makeBlock(BlockNumber(2), 1, block1.archive); - const block3 = await makeBlock(BlockNumber(3), 2, block2.archive); - - await archiver.addBlock(block1); - await archiver.addBlock(block2); - await archiver.addBlock(block3); - - // Start from block 2 - const blocks = await archiver.getL2BlocksNew(BlockNumber(2), 2); - expect(blocks.length).toEqual(2); - expect(await blocks[0].hash()).toEqual(await block2.hash()); - expect(await blocks[1].hash()).toEqual(await block3.hash()); - }); - - it('returns empty array when requesting blocks beyond available range', async () => { - setupMinimalL1Mocks(); - const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); - - await archiver.addBlock(block1); - - // Request blocks starting from block 5 (which doesn't exist) - const blocks = await archiver.getL2BlocksNew(BlockNumber(5), 3); - expect(blocks).toEqual([]); - }); - - it('returns partial results when limit exceeds available blocks', async () => { 
- setupMinimalL1Mocks(); - const block1 = await makeBlock(BlockNumber(1), 0, genesisArchive); - const block2 = await makeBlock(BlockNumber(2), 1, block1.archive); - - await archiver.addBlock(block1); - await archiver.addBlock(block2); - - // Request 10 blocks but only 2 are available - const blocks = await archiver.getL2BlocksNew(BlockNumber(1), 10); - expect(blocks.length).toEqual(2); - expect(await blocks[0].hash()).toEqual(await block1.hash()); - expect(await blocks[1].hash()).toEqual(await block2.hash()); - }); - - it('blocks added via addBlock become checkpointed when checkpoint syncs from L1', async () => { - // First, sync checkpoint 1 from L1 to establish a baseline - const checkpoint1 = checkpoints[0]; - const rollupTx1 = makeRollupTx(checkpoint1); - const blobHashes1 = makeVersionedBlobHashes(checkpoint1); - const blobsFromCheckpoint1 = makeBlobsFromCheckpoint(checkpoint1); - - mockL1BlockNumbers(100n, 200n); - - mockRollup.read.status - .mockResolvedValueOnce([0n, GENESIS_ROOT, 1n, checkpoint1.archive.root.toString(), GENESIS_ROOT]) - .mockResolvedValue([ - 1n, - checkpoint1.archive.root.toString(), - 2n, - checkpoints[1].archive.root.toString(), - checkpoint1.archive.root.toString(), - ]); - - makeCheckpointProposedEvent(70n, checkpoint1.number, checkpoint1.archive.root.toString(), blobHashes1); - makeMessageSentEvents(60n, checkpoint1.number, messagesPerCheckpoint[0]); - mockInbox.read.getState.mockResolvedValueOnce(makeInboxStateFromMsgCount(messagesPerCheckpoint[0].length)); - - publicClient.getTransaction.mockResolvedValueOnce(rollupTx1); - blobClient.getBlobSidecar.mockResolvedValueOnce(blobsFromCheckpoint1); - - await archiver.start(false); - await waitUntilArchiverCheckpoint(CheckpointNumber(1)); - - expect(await archiver.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(1)); - const lastBlockInCheckpoint1 = checkpoint1.blocks[checkpoint1.blocks.length - 1].number; - - // Verify L2Tips after syncing checkpoint 1: proposed and checkpointed 
should both be at checkpoint 1 - const tipsAfterCheckpoint1 = await archiver.getL2Tips(); - expect(tipsAfterCheckpoint1.proposed.number).toEqual(lastBlockInCheckpoint1); - expect(tipsAfterCheckpoint1.checkpointed.block.number).toEqual(lastBlockInCheckpoint1); - expect(tipsAfterCheckpoint1.checkpointed.checkpoint.number).toEqual(CheckpointNumber(1)); - - // Now add blocks for checkpoint 2 via addBlock (simulating local block production) - const checkpoint2 = checkpoints[1]; - for (const block of checkpoint2.blocks) { - await archiver.addBlock(block); - } - - // Verify blocks are retrievable but not yet checkpointed - const lastBlockInCheckpoint2 = checkpoint2.blocks[checkpoint2.blocks.length - 1].number; - expect(await archiver.getBlockNumber()).toEqual(lastBlockInCheckpoint2); - expect(await archiver.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(1)); - - // Verify L2Tips after adding blocks: proposed advances but checkpointed stays at checkpoint 1 - const tipsAfterAddBlock = await archiver.getL2Tips(); - expect(tipsAfterAddBlock.proposed.number).toEqual(lastBlockInCheckpoint2); - expect(tipsAfterAddBlock.checkpointed.block.number).toEqual(lastBlockInCheckpoint1); - expect(tipsAfterAddBlock.checkpointed.checkpoint.number).toEqual(CheckpointNumber(1)); - - // getCheckpointedBlock should return undefined for the new blocks since checkpoint 2 hasn't synced - const firstNewBlockNumber = lastBlockInCheckpoint1 + 1; - const uncheckpointedBlock = await archiver.getCheckpointedBlock(BlockNumber(firstNewBlockNumber)); - expect(uncheckpointedBlock).toBeUndefined(); - - // But getL2BlockNew should work (it retrieves both checkpointed and uncheckpointed blocks) - const block = await archiver.getL2BlockNew(BlockNumber(firstNewBlockNumber)); - expect(block).toBeDefined(); - - // Now sync checkpoint 2 from L1 - const rollupTx2 = makeRollupTx(checkpoint2); - const blobHashes2 = makeVersionedBlobHashes(checkpoint2); - const blobsFromCheckpoint2 = 
makeBlobsFromCheckpoint(checkpoint2); - - makeCheckpointProposedEvent(170n, checkpoint2.number, checkpoint2.archive.root.toString(), blobHashes2); - makeMessageSentEvents(160n, checkpoint2.number, messagesPerCheckpoint[1]); - mockInbox.read.getState.mockResolvedValue( - makeInboxStateFromMsgCount(messagesPerCheckpoint[0].length + messagesPerCheckpoint[1].length), - ); - - publicClient.getTransaction.mockResolvedValueOnce(rollupTx2); - blobClient.getBlobSidecar.mockResolvedValueOnce(blobsFromCheckpoint2); - - await waitUntilArchiverCheckpoint(CheckpointNumber(2)); - - // Now the blocks should be checkpointed - expect(await archiver.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(2)); - - // Verify L2Tips after syncing checkpoint 2: proposed and checkpointed should both be at checkpoint 2 - const tipsAfterCheckpoint2 = await archiver.getL2Tips(); - expect(tipsAfterCheckpoint2.proposed.number).toEqual(lastBlockInCheckpoint2); - expect(tipsAfterCheckpoint2.checkpointed.block.number).toEqual(lastBlockInCheckpoint2); - expect(tipsAfterCheckpoint2.checkpointed.checkpoint.number).toEqual(CheckpointNumber(2)); - - // getCheckpointedBlock should now work for the new blocks - const checkpointedBlock = await archiver.getCheckpointedBlock(BlockNumber(firstNewBlockNumber)); - expect(checkpointedBlock).toBeDefined(); - expect(checkpointedBlock!.checkpointNumber).toEqual(2); - }, 10_000); - - it('blocks added via checkpoints can not be added via addblocks', async () => { - // First, sync checkpoint 1 from L1 to establish a baseline - const checkpoint1 = checkpoints[0]; - const rollupTx1 = makeRollupTx(checkpoint1); - const blobHashes1 = makeVersionedBlobHashes(checkpoint1); - const blobsFromCheckpoint1 = makeBlobsFromCheckpoint(checkpoint1); - - mockL1BlockNumbers(100n, 200n); - - mockRollup.read.status - .mockResolvedValueOnce([0n, GENESIS_ROOT, 1n, checkpoint1.archive.root.toString(), GENESIS_ROOT]) - .mockResolvedValue([ - 1n, - checkpoint1.archive.root.toString(), - 2n, 
- checkpoints[1].archive.root.toString(), - checkpoint1.archive.root.toString(), - ]); - - makeCheckpointProposedEvent(70n, checkpoint1.number, checkpoint1.archive.root.toString(), blobHashes1); - makeMessageSentEvents(60n, checkpoint1.number, messagesPerCheckpoint[0]); - mockInbox.read.getState.mockResolvedValueOnce(makeInboxStateFromMsgCount(messagesPerCheckpoint[0].length)); - - publicClient.getTransaction.mockResolvedValueOnce(rollupTx1); - blobClient.getBlobSidecar.mockResolvedValueOnce(blobsFromCheckpoint1); - - await archiver.start(false); - await waitUntilArchiverCheckpoint(CheckpointNumber(1)); - - expect(await archiver.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(1)); - const blockAlreadySyncedFromCheckpoint = checkpoint1.blocks[checkpoint1.blocks.length - 1]; - - // Now try and add one of the blocks via the addBlocks method. It should throw - await expect(archiver.addBlock(blockAlreadySyncedFromCheckpoint)).rejects.toThrow( - InitialBlockNumberNotSequentialError, - ); - }, 10_000); - - it('can add more blocks after checkpoint syncs and then sync another checkpoint', async () => { - // Sync the first checkpoint normally - const checkpoint1 = checkpoints[0]; - const rollupTx1 = makeRollupTx(checkpoint1); - const blobHashes1 = makeVersionedBlobHashes(checkpoint1); - const blobsFromCheckpoint1 = makeBlobsFromCheckpoint(checkpoint1); - - mockL1BlockNumbers(100n, 200n); - - mockRollup.read.status - .mockResolvedValueOnce([0n, GENESIS_ROOT, 1n, checkpoint1.archive.root.toString(), GENESIS_ROOT]) - .mockResolvedValue([ - 1n, - checkpoint1.archive.root.toString(), - 2n, - checkpoints[1].archive.root.toString(), - checkpoint1.archive.root.toString(), - ]); - - makeCheckpointProposedEvent(70n, checkpoint1.number, checkpoint1.archive.root.toString(), blobHashes1); - makeMessageSentEvents(60n, checkpoint1.number, messagesPerCheckpoint[0]); - mockInbox.read.getState.mockResolvedValueOnce(makeInboxStateFromMsgCount(messagesPerCheckpoint[0].length)); - - 
publicClient.getTransaction.mockResolvedValueOnce(rollupTx1); - blobClient.getBlobSidecar.mockResolvedValueOnce(blobsFromCheckpoint1); - - await archiver.start(false); - await waitUntilArchiverCheckpoint(CheckpointNumber(1)); - - expect(await archiver.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(1)); - const lastBlockInCheckpoint1 = checkpoint1.blocks[checkpoint1.blocks.length - 1].number; - - // Verify L2Tips after syncing checkpoint 1: proposed and checkpointed at checkpoint 1 - const tipsAfterCheckpoint1 = await archiver.getL2Tips(); - expect(tipsAfterCheckpoint1.proposed.number).toEqual(lastBlockInCheckpoint1); - expect(tipsAfterCheckpoint1.checkpointed.block.number).toEqual(lastBlockInCheckpoint1); - expect(tipsAfterCheckpoint1.checkpointed.checkpoint.number).toEqual(CheckpointNumber(1)); - - // Now add more blocks via addBlock (simulating local block production ahead of L1) - const checkpoint2 = checkpoints[1]; - for (const block of checkpoint2.blocks) { - await archiver.addBlock(block); - } - - // Verify blocks are retrievable - const lastBlockInCheckpoint2 = checkpoint2.blocks[checkpoint2.blocks.length - 1].number; - expect(await archiver.getBlockNumber()).toEqual(lastBlockInCheckpoint2); - - // But checkpoint number should still be 1 - expect(await archiver.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(1)); - - // Verify L2Tips after adding blocks: proposed advances, checkpointed stays at checkpoint 1 - const tipsAfterAddBlock = await archiver.getL2Tips(); - expect(tipsAfterAddBlock.proposed.number).toEqual(lastBlockInCheckpoint2); - expect(tipsAfterAddBlock.checkpointed.block.number).toEqual(lastBlockInCheckpoint1); - expect(tipsAfterAddBlock.checkpointed.checkpoint.number).toEqual(CheckpointNumber(1)); - - // New blocks should not be checkpointed yet - const firstNewBlockNumber = lastBlockInCheckpoint1 + 1; - const uncheckpointedBlock = await archiver.getCheckpointedBlock(BlockNumber(firstNewBlockNumber)); - 
expect(uncheckpointedBlock).toBeUndefined(); - - // Now sync checkpoint 2 from L1 - const rollupTx2 = makeRollupTx(checkpoint2); - const blobHashes2 = makeVersionedBlobHashes(checkpoint2); - const blobsFromCheckpoint2 = makeBlobsFromCheckpoint(checkpoint2); - - makeCheckpointProposedEvent(170n, checkpoint2.number, checkpoint2.archive.root.toString(), blobHashes2); - makeMessageSentEvents(160n, checkpoint2.number, messagesPerCheckpoint[1]); - mockInbox.read.getState.mockResolvedValue( - makeInboxStateFromMsgCount(messagesPerCheckpoint[0].length + messagesPerCheckpoint[1].length), - ); - - publicClient.getTransaction.mockResolvedValueOnce(rollupTx2); - blobClient.getBlobSidecar.mockResolvedValueOnce(blobsFromCheckpoint2); - - await waitUntilArchiverCheckpoint(CheckpointNumber(2)); - - // Now all blocks should be checkpointed - expect(await archiver.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(2)); - - // Verify L2Tips after syncing checkpoint 2: both proposed and checkpointed at checkpoint 2 - const tipsAfterCheckpoint2 = await archiver.getL2Tips(); - expect(tipsAfterCheckpoint2.proposed.number).toEqual(lastBlockInCheckpoint2); - expect(tipsAfterCheckpoint2.checkpointed.block.number).toEqual(lastBlockInCheckpoint2); - expect(tipsAfterCheckpoint2.checkpointed.checkpoint.number).toEqual(CheckpointNumber(2)); - - const checkpointedBlock = await archiver.getCheckpointedBlock(BlockNumber(firstNewBlockNumber)); - expect(checkpointedBlock).toBeDefined(); - expect(checkpointedBlock!.checkpointNumber).toEqual(2); - }, 10_000); - }); - - // TODO(palla/reorg): Add a unit test for the archiver handleEpochPrune - xit('handles an upcoming L2 prune', () => {}); - - describe('getCheckpointedBlocks', () => { - it('returns checkpointed blocks with checkpoint info', async () => { - const rollupTxs = checkpoints.map(c => makeRollupTx(c)); - const blobHashes = checkpoints.map(makeVersionedBlobHashes); - - mockL1BlockNumbers(100n); - - mockRollup.read.status.mockResolvedValue([ 
- 0n, - GENESIS_ROOT, - 3n, - checkpoints[2].archive.root.toString(), - GENESIS_ROOT, - ]); - - checkpoints.forEach((c, i) => - makeCheckpointProposedEvent(70n + BigInt(i) * 10n, c.number, c.archive.root.toString(), blobHashes[i]), - ); - messagesPerCheckpoint.forEach((messages, i) => - makeMessageSentEvents(60n + BigInt(i) * 5n, checkpoints[i].number, messages), - ); - mockInbox.read.getState.mockResolvedValue( - makeInboxStateFromMsgCount(messagesPerCheckpoint.reduce((acc, curr) => acc + curr.length, 0)), - ); - - rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); - const blobsFromCheckpoints = checkpoints.map(c => makeBlobsFromCheckpoint(c)); - blobsFromCheckpoints.forEach(blobs => blobClient.getBlobSidecar.mockResolvedValueOnce(blobs)); - - await archiver.start(false); - await waitUntilArchiverCheckpoint(CheckpointNumber(3)); - - // Get checkpointed blocks starting from block 1 - const checkpointedBlocks = await archiver.getCheckpointedBlocks(BlockNumber(1), 100); - - // Should return all blocks from all checkpoints - const expectedBlocks = checkpoints.flatMap(c => c.blocks); - expect(checkpointedBlocks.length).toBe(expectedBlocks.length); - - // Verify blocks are returned in correct order and have correct checkpoint info - let blockIndex = 0; - for (let cpIdx = 0; cpIdx < checkpoints.length; cpIdx++) { - const checkpoint = checkpoints[cpIdx]; - for (let i = 0; i < checkpoint.blocks.length; i++) { - const cb = checkpointedBlocks[blockIndex]; - const expectedBlock = checkpoint.blocks[i]; - - // Verify block number matches - expect(cb.block.number).toBe(expectedBlock.number); - - // Verify checkpoint number is correct - expect(cb.checkpointNumber).toBe(checkpoint.number); - - // Verify archive root matches (more reliable than hash which depends on L1-to-L2 messages) - expect(cb.block.archive.root.toString()).toBe(expectedBlock.archive.root.toString()); - - // Verify L1 published data is present - expect(cb.l1).toBeDefined(); - 
expect(cb.l1.blockNumber).toBeGreaterThan(0n); - - blockIndex++; - } - } - }, 10_000); - - it('respects the limit parameter', async () => { - const rollupTxs = checkpoints.map(c => makeRollupTx(c)); - const blobHashes = checkpoints.map(makeVersionedBlobHashes); - - mockL1BlockNumbers(100n); - - mockRollup.read.status.mockResolvedValue([ - 0n, - GENESIS_ROOT, - 3n, - checkpoints[2].archive.root.toString(), - GENESIS_ROOT, - ]); - - checkpoints.forEach((c, i) => - makeCheckpointProposedEvent(70n + BigInt(i) * 10n, c.number, c.archive.root.toString(), blobHashes[i]), - ); - messagesPerCheckpoint.forEach((messages, i) => - makeMessageSentEvents(60n + BigInt(i) * 5n, checkpoints[i].number, messages), - ); - mockInbox.read.getState.mockResolvedValue( - makeInboxStateFromMsgCount(messagesPerCheckpoint.reduce((acc, curr) => acc + curr.length, 0)), - ); - - rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); - const blobsFromCheckpoints = checkpoints.map(c => makeBlobsFromCheckpoint(c)); - blobsFromCheckpoints.forEach(blobs => blobClient.getBlobSidecar.mockResolvedValueOnce(blobs)); - - await archiver.start(false); - await waitUntilArchiverCheckpoint(CheckpointNumber(3)); - - // Get only 2 checkpointed blocks starting from block 1 (out of 3 total) - const checkpointedBlocks = await archiver.getCheckpointedBlocks(BlockNumber(1), 2); - expect(checkpointedBlocks.length).toBe(2); - - // Verify exact block numbers (blocks 1 and 2) - expect(checkpointedBlocks[0].block.number).toBe(BlockNumber(1)); - expect(checkpointedBlocks[1].block.number).toBe(BlockNumber(2)); - - // Verify archive roots match original checkpoint blocks - expect(checkpointedBlocks[0].block.archive.root.toString()).toBe( - checkpoints[0].blocks[0].archive.root.toString(), - ); - expect(checkpointedBlocks[1].block.archive.root.toString()).toBe( - checkpoints[1].blocks[0].archive.root.toString(), - ); - - // Verify checkpoint numbers (block 1 is from checkpoint 1, block 2 is from 
checkpoint 2) - expect(checkpointedBlocks[0].checkpointNumber).toBe(1); - expect(checkpointedBlocks[1].checkpointNumber).toBe(2); - }, 10_000); - - it('returns blocks starting from specified block number', async () => { - const rollupTxs = checkpoints.map(c => makeRollupTx(c)); - const blobHashes = checkpoints.map(makeVersionedBlobHashes); - - mockL1BlockNumbers(100n); - - mockRollup.read.status.mockResolvedValue([ - 0n, - GENESIS_ROOT, - 3n, - checkpoints[2].archive.root.toString(), - GENESIS_ROOT, - ]); - - checkpoints.forEach((c, i) => - makeCheckpointProposedEvent(70n + BigInt(i) * 10n, c.number, c.archive.root.toString(), blobHashes[i]), - ); - messagesPerCheckpoint.forEach((messages, i) => - makeMessageSentEvents(60n + BigInt(i) * 5n, checkpoints[i].number, messages), - ); - mockInbox.read.getState.mockResolvedValue( - makeInboxStateFromMsgCount(messagesPerCheckpoint.reduce((acc, curr) => acc + curr.length, 0)), - ); - - rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); - const blobsFromCheckpoints = checkpoints.map(c => makeBlobsFromCheckpoint(c)); - blobsFromCheckpoints.forEach(blobs => blobClient.getBlobSidecar.mockResolvedValueOnce(blobs)); - - await archiver.start(false); - await waitUntilArchiverCheckpoint(CheckpointNumber(3)); - - // Get blocks starting from block 2 (skip block 1, get blocks 2 and 3) - const checkpointedBlocks = await archiver.getCheckpointedBlocks(BlockNumber(2), 10); - - // Should return 2 blocks (blocks 2 and 3 - since there are only 3 blocks total, 1 per checkpoint) - expect(checkpointedBlocks.length).toBe(2); - - // Verify block numbers are sequential starting from 2 - expect(checkpointedBlocks[0].block.number).toBe(BlockNumber(2)); - expect(checkpointedBlocks[1].block.number).toBe(BlockNumber(3)); - - // Verify checkpoint numbers (block 2 is from checkpoint 2, block 3 is from checkpoint 3) - expect(checkpointedBlocks[0].checkpointNumber).toBe(2); - 
expect(checkpointedBlocks[1].checkpointNumber).toBe(3); - - // Verify archive roots match expected blocks from checkpoints - expect(checkpointedBlocks[0].block.archive.root.toString()).toBe( - checkpoints[1].blocks[0].archive.root.toString(), - ); - expect(checkpointedBlocks[1].block.archive.root.toString()).toBe( - checkpoints[2].blocks[0].archive.root.toString(), - ); - }, 10_000); - - it('returns empty array when no checkpointed blocks exist', async () => { - mockL1BlockNumbers(100n); - mockRollup.read.status.mockResolvedValue([0n, GENESIS_ROOT, 0n, GENESIS_ROOT, GENESIS_ROOT]); - mockInbox.read.getState.mockResolvedValue(makeInboxStateFromMsgCount(0)); - - await archiver.start(false); - - const checkpointedBlocks = await archiver.getCheckpointedBlocks(BlockNumber(1), 10); - expect(checkpointedBlocks).toEqual([]); - }, 10_000); - - it('filters by proven status when proven=true', async () => { - const rollupTxs = checkpoints.map(c => makeRollupTx(c)); - const blobHashes = checkpoints.map(makeVersionedBlobHashes); - - mockL1BlockNumbers(100n); - - // Set checkpoint 1 as proven (provenCheckpointNumber = 1) - mockRollup.read.status.mockResolvedValue([ - 1n, // provenCheckpointNumber - checkpoints[0].archive.root.toString(), // provenArchive - 3n, // pendingCheckpointNumber - checkpoints[2].archive.root.toString(), // pendingArchive - checkpoints[0].archive.root.toString(), // archiveForLocalPendingCheckpointNumber - ]); - - checkpoints.forEach((c, i) => - makeCheckpointProposedEvent(70n + BigInt(i) * 10n, c.number, c.archive.root.toString(), blobHashes[i]), - ); - messagesPerCheckpoint.forEach((messages, i) => - makeMessageSentEvents(60n + BigInt(i) * 5n, checkpoints[i].number, messages), - ); - mockInbox.read.getState.mockResolvedValue( - makeInboxStateFromMsgCount(messagesPerCheckpoint.reduce((acc, curr) => acc + curr.length, 0)), - ); - - rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); - const blobsFromCheckpoints = 
checkpoints.map(c => makeBlobsFromCheckpoint(c)); - blobsFromCheckpoints.forEach(blobs => blobClient.getBlobSidecar.mockResolvedValueOnce(blobs)); - - await archiver.start(false); - await waitUntilArchiverCheckpoint(CheckpointNumber(3)); - - // Get all checkpointed blocks without proven filter - const allBlocks = await archiver.getCheckpointedBlocks(BlockNumber(1), 100); - const totalBlocks = checkpoints.reduce((acc, c) => acc + c.blocks.length, 0); - expect(allBlocks.length).toBe(totalBlocks); - - // Get only proven checkpointed blocks (should only include blocks from checkpoint 1) - const provenBlocks = await archiver.getCheckpointedBlocks(BlockNumber(1), 100, true); - const checkpoint1Blocks = checkpoints[0].blocks; - expect(provenBlocks.length).toBe(checkpoint1Blocks.length); - - // Verify all proven blocks are from checkpoint 1 and match expected blocks - for (let i = 0; i < provenBlocks.length; i++) { - const cb = provenBlocks[i]; - expect(cb.checkpointNumber).toBe(1); - expect(cb.block.number).toBe(checkpoint1Blocks[i].number); - - // Verify archive root matches (more reliable than hash which depends on L1-to-L2 messages) - expect(cb.block.archive.root.toString()).toBe(checkpoint1Blocks[i].archive.root.toString()); - } - - // Verify the last proven block number matches the last block of checkpoint 1 - const lastProvenBlock = provenBlocks[provenBlocks.length - 1]; - const lastCheckpoint1Block = checkpoint1Blocks[checkpoint1Blocks.length - 1]; - expect(lastProvenBlock.block.number).toBe(lastCheckpoint1Block.number); - }, 10_000); - }); - - describe('getL2BlocksNew with proven filter', () => { - it('filters by proven status when proven=true', async () => { - const rollupTxs = checkpoints.map(c => makeRollupTx(c)); - const blobHashes = checkpoints.map(makeVersionedBlobHashes); - - mockL1BlockNumbers(100n); - - // Set checkpoint 1 as proven (provenCheckpointNumber = 1) - mockRollup.read.status.mockResolvedValue([ - 1n, // provenCheckpointNumber - 
checkpoints[0].archive.root.toString(), // provenArchive - 3n, // pendingCheckpointNumber - checkpoints[2].archive.root.toString(), // pendingArchive - checkpoints[0].archive.root.toString(), // archiveForLocalPendingCheckpointNumber - ]); - - checkpoints.forEach((c, i) => - makeCheckpointProposedEvent(70n + BigInt(i) * 10n, c.number, c.archive.root.toString(), blobHashes[i]), - ); - messagesPerCheckpoint.forEach((messages, i) => - makeMessageSentEvents(60n + BigInt(i) * 5n, checkpoints[i].number, messages), - ); - mockInbox.read.getState.mockResolvedValue( - makeInboxStateFromMsgCount(messagesPerCheckpoint.reduce((acc, curr) => acc + curr.length, 0)), - ); - - rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); - const blobsFromCheckpoints = checkpoints.map(c => makeBlobsFromCheckpoint(c)); - blobsFromCheckpoints.forEach(blobs => blobClient.getBlobSidecar.mockResolvedValueOnce(blobs)); - - await archiver.start(false); - await waitUntilArchiverCheckpoint(CheckpointNumber(3)); - - // Get all blocks without proven filter - const allBlocks = await archiver.getL2BlocksNew(BlockNumber(1), 100); - const totalBlocks = checkpoints.reduce((acc, c) => acc + c.blocks.length, 0); - expect(allBlocks.length).toBe(totalBlocks); - - // Get only proven blocks (should only include blocks from checkpoint 1) - const provenBlocks = await archiver.getL2BlocksNew(BlockNumber(1), 100, true); - const checkpoint1Blocks = checkpoints[0].blocks; - expect(provenBlocks.length).toBe(checkpoint1Blocks.length); - - // Verify block numbers match checkpoint 1 blocks - for (let i = 0; i < provenBlocks.length; i++) { - expect(provenBlocks[i].number).toBe(checkpoint1Blocks[i].number); - - // Verify archive root matches (more reliable than hash which depends on L1-to-L2 messages) - expect(provenBlocks[i].archive.root.toString()).toBe(checkpoint1Blocks[i].archive.root.toString()); - } - - // Verify the last proven block is the last block of checkpoint 1 - const 
lastProvenBlockNumber = checkpoint1Blocks[checkpoint1Blocks.length - 1].number; - expect(provenBlocks[provenBlocks.length - 1].number).toBe(lastProvenBlockNumber); - - // Verify no unproven blocks are included - const unprovenBlockNumbers = checkpoints.slice(1).flatMap(c => c.blocks.map(b => b.number)); - provenBlocks.forEach(b => { - expect(unprovenBlockNumbers).not.toContain(b.number); - }); - }, 10_000); - - it('returns all blocks when proven=false or undefined', async () => { - const rollupTxs = checkpoints.map(c => makeRollupTx(c)); - const blobHashes = checkpoints.map(makeVersionedBlobHashes); - - mockL1BlockNumbers(100n); - - // Set checkpoint 1 as proven - mockRollup.read.status.mockResolvedValue([ - 1n, - checkpoints[0].archive.root.toString(), - 3n, - checkpoints[2].archive.root.toString(), - checkpoints[0].archive.root.toString(), - ]); - - checkpoints.forEach((c, i) => - makeCheckpointProposedEvent(70n + BigInt(i) * 10n, c.number, c.archive.root.toString(), blobHashes[i]), - ); - messagesPerCheckpoint.forEach((messages, i) => - makeMessageSentEvents(60n + BigInt(i) * 5n, checkpoints[i].number, messages), - ); - mockInbox.read.getState.mockResolvedValue( - makeInboxStateFromMsgCount(messagesPerCheckpoint.reduce((acc, curr) => acc + curr.length, 0)), - ); - - rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); - const blobsFromCheckpoints = checkpoints.map(c => makeBlobsFromCheckpoint(c)); - blobsFromCheckpoints.forEach(blobs => blobClient.getBlobSidecar.mockResolvedValueOnce(blobs)); - - await archiver.start(false); - await waitUntilArchiverCheckpoint(CheckpointNumber(3)); - - const expectedBlocks = checkpoints.flatMap(c => c.blocks); - const totalBlocks = expectedBlocks.length; - - // Get blocks with proven=false - should include all blocks - const blocksProvenFalse = await archiver.getL2BlocksNew(BlockNumber(1), 100, false); - expect(blocksProvenFalse.length).toBe(totalBlocks); - - // Verify all block numbers are present - 
for (let i = 0; i < blocksProvenFalse.length; i++) { - expect(blocksProvenFalse[i].number).toBe(expectedBlocks[i].number); - } - - // Get blocks with proven=undefined - should include all blocks - const blocksProvenUndefined = await archiver.getL2BlocksNew(BlockNumber(1), 100); - expect(blocksProvenUndefined.length).toBe(totalBlocks); - - // Verify all block numbers match - for (let i = 0; i < blocksProvenUndefined.length; i++) { - expect(blocksProvenUndefined[i].number).toBe(expectedBlocks[i].number); - } - - // Verify blocks include unproven blocks (from checkpoints 2 and 3) - const unprovenBlockNumbers = checkpoints.slice(1).flatMap(c => c.blocks.map(b => b.number)); - const returnedBlockNumbers = blocksProvenFalse.map(b => b.number); - unprovenBlockNumbers.forEach(unprovenNum => { - expect(returnedBlockNumbers).toContain(unprovenNum); - }); - }, 10_000); - }); - - const waitUntilArchiverCheckpoint = async (checkpointNumber: CheckpointNumber) => { - logger.info(`Waiting for archiver to sync to checkpoint ${checkpointNumber}`); - await retryUntil(() => archiver.getSynchedCheckpointNumber().then(n => n === checkpointNumber), 'sync', 10, 0.1); - }; - - /** Makes a fake Inbox state assuming this many messages have been created. */ - const makeInboxStateFromMsgCount = (msgCount: number) => { - return { - rollingHash: msgCount === 0 ? Buffer16.ZERO.toString() : l2MessageSentLogs[msgCount - 1].args.rollingHash, - totalMessagesInserted: BigInt(msgCount), - inProgress: 0n, - }; - }; - - /** - * Makes a fake CheckpointProposed event for testing purposes and registers it to be returned by the public client. - * @param l1BlockNum - L1 block number. - * @param checkpointNumber - Checkpoint number. 
- */ - const makeCheckpointProposedEvent = ( - l1BlockNum: bigint, - checkpointNumber: CheckpointNumber, - archive: `0x${string}`, - versionedBlobHashes: `0x${string}`[], - ) => { - const log = { - blockNumber: l1BlockNum, - blockHash: Buffer32.fromBigInt(l1BlockNum).toString(), - args: { checkpointNumber: BigInt(checkpointNumber), archive, versionedBlobHashes }, - transactionHash: archive, - } as unknown as Log; - checkpointProposedLogs.push(log); - }; - - /** - * Makes fake L1ToL2 MessageSent events for testing purposes and registers it to be returned by the public client. - * @param l1BlockNum - L1 block number. - * @param checkpointNumber - The checkpoint number for which the message was included. - * @param indexInSubtree - the index in the l2Block's subtree in the L1 to L2 Messages Tree. - */ - const makeMessageSentEvent = ( - l1BlockNum: bigint, - checkpointNumber: CheckpointNumber, - indexInSubtree: bigint, - leaf: Fr, - ) => { - const index = indexInSubtree + InboxLeaf.smallestIndexForCheckpoint(checkpointNumber); - messagesRollingHash = updateRollingHash(messagesRollingHash, leaf); - totalMessagesInserted++; - - const log = { - blockNumber: l1BlockNum, - blockHash: Buffer32.fromBigInt(l1BlockNum).toString(), - args: { - checkpointNumber: BigInt(checkpointNumber), - index, - hash: leaf.toString(), - rollingHash: messagesRollingHash.toString(), - }, - transactionHash: `0x${l1BlockNum}`, - } as Log; - l2MessageSentLogs.push(log); - return { log, leaf, index }; - }; - - const makeMessageSentEvents = (fromL1BlockNum: bigint, checkpointNumber: CheckpointNumber, messages: Fr[]) => { - return messages.map((msg, index) => - makeMessageSentEvent(fromL1BlockNum + BigInt(index), checkpointNumber, BigInt(index), msg), - ); - }; - - /** - * Makes a fake rollup tx for testing purposes. - * @param checkpoint - The checkpoint. - * @returns A fake tx with calldata that corresponds to calling process in the Rollup contract. 
- */ - const makeRollupTx = (checkpoint: Checkpoint, signers: Secp256k1Signer[] = []) => { - const attestations = signers - .map(signer => makeCheckpointAttestationFromCheckpoint(checkpoint, signer)) - .map(attestation => CommitteeAttestation.fromSignature(attestation.signature)) - .map(committeeAttestation => committeeAttestation.toViem()); - const header = checkpoint.header.toViem(); - const blobInput = getPrefixedEthBlobCommitments(getBlobsPerL1Block(checkpoint.toBlobFields())); - const archive = toHex(checkpoint.archive.root.toBuffer()); - const attestationsAndSigners = new CommitteeAttestationsAndSigners( - attestations.map(attestation => CommitteeAttestation.fromViem(attestation)), - ); - - const attestationsAndSignersSignature = makeAndSignCommitteeAttestationsAndSigners( - attestationsAndSigners, - signers[0], - ); - const rollupInput = encodeFunctionData({ - abi: RollupAbi, - functionName: 'propose', - args: [ - { - header, - archive, - oracleInput: { feeAssetPriceModifier: 0n }, - }, - attestationsAndSigners.getPackedAttestations(), - attestationsAndSigners.getSigners().map(signer => signer.toString()), - attestationsAndSignersSignature.toViemSignature(), - blobInput, - ], - }); - - const multiCallInput = encodeFunctionData({ - abi: multicall3Abi, - functionName: 'aggregate3', - args: [ - [ - { - target: rollupAddress.toString(), - callData: rollupInput, - allowFailure: false, - }, - ], - ], - }); - const tx = { - input: multiCallInput, - hash: archive, - blockHash: archive, - to: MULTI_CALL_3_ADDRESS as `0x${string}`, - } as Transaction; - allRollupTxs.set(checkpoint.archive.root.toString(), tx); - return tx; - }; - - /** - * Makes versioned blob hashes for testing purposes. - * @param checkpoint - The checkpoint. - * @returns Versioned blob hashes. 
- */ - const makeVersionedBlobHashes = (checkpoint: Checkpoint): `0x${string}`[] => { - const blobFields = checkpoint.toBlobFields(); - const blobs = getBlobsPerL1Block(blobFields); - const blobHashes = blobs.map(b => b.getEthVersionedBlobHash()).map(bufferToHex); - allVersionedBlobHashes.set(checkpoint.archive.root.toString(), blobHashes); - return blobHashes; - }; - - /** - * Blob response to be returned from the blob client based on the expected checkpoint. - * @param checkpoint - The checkpoint. - * @returns The blobs. - */ - const makeBlobsFromCheckpoint = (checkpoint: Checkpoint) => { - const blobFields = checkpoint.toBlobFields(); - const blobs = getBlobsPerL1Block(blobFields); - allBlobs.set(checkpoint.archive.root.toString(), blobs); - return blobs; - }; -}); diff --git a/yarn-project/archiver/src/archiver/archiver_store_updates.ts b/yarn-project/archiver/src/archiver/archiver_store_updates.ts deleted file mode 100644 index ec8b57e564dc..000000000000 --- a/yarn-project/archiver/src/archiver/archiver_store_updates.ts +++ /dev/null @@ -1,321 +0,0 @@ -import type { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; -import { Fr } from '@aztec/foundation/curves/bn254'; -import { createLogger } from '@aztec/foundation/log'; -import { - ContractClassPublishedEvent, - PrivateFunctionBroadcastedEvent, - UtilityFunctionBroadcastedEvent, -} from '@aztec/protocol-contracts/class-registry'; -import { - ContractInstancePublishedEvent, - ContractInstanceUpdatedEvent, -} from '@aztec/protocol-contracts/instance-registry'; -import type { L2BlockNew, ValidateCheckpointResult } from '@aztec/stdlib/block'; -import type { PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; -import { - type ExecutablePrivateFunctionWithMembershipProof, - type UtilityFunctionWithMembershipProof, - computePublicBytecodeCommitment, - isValidPrivateFunctionMembershipProof, - isValidUtilityFunctionMembershipProof, -} from '@aztec/stdlib/contract'; -import type { 
ContractClassLog, PrivateLog, PublicLog } from '@aztec/stdlib/logs'; -import type { UInt64 } from '@aztec/stdlib/types'; - -import groupBy from 'lodash.groupby'; - -import type { KVArchiverDataStore } from './kv_archiver_store/kv_archiver_store.js'; - -const log = createLogger('archiver:store-updates'); - -/** Operation type for contract data updates. */ -enum Operation { - Store, - Delete, -} - -/** - * Adds blocks to the store with contract class/instance extraction from logs. - * Extracts ContractClassPublished, ContractInstancePublished, ContractInstanceUpdated events, - * and individually broadcasted functions from the block logs. - * - * @param store - The archiver data store. - * @param blocks - The L2 blocks to add. - * @param pendingChainValidationStatus - Optional validation status to set. - * @returns True if the operation is successful. - */ -export function addBlocksWithContractData( - store: KVArchiverDataStore, - blocks: L2BlockNew[], - pendingChainValidationStatus?: ValidateCheckpointResult, -): Promise { - return store.transactionAsync(async () => { - await store.addBlocks(blocks); - - const opResults = await Promise.all([ - // Update the pending chain validation status if provided - pendingChainValidationStatus && store.setPendingChainValidationStatus(pendingChainValidationStatus), - // Add any logs emitted during the retrieved blocks - store.addLogs(blocks), - // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them - ...blocks.map(block => addBlockDataToDB(store, block)), - ]); - - return opResults.every(Boolean); - }); -} - -/** - * Adds checkpoints to the store with contract class/instance extraction from logs. - * Extracts ContractClassPublished, ContractInstancePublished, ContractInstanceUpdated events, - * and individually broadcasted functions from the checkpoint block logs. - * - * @param store - The archiver data store. - * @param checkpoints - The published checkpoints to add. 
- * @param pendingChainValidationStatus - Optional validation status to set. - * @returns True if the operation is successful. - */ -export function addCheckpointsWithContractData( - store: KVArchiverDataStore, - checkpoints: PublishedCheckpoint[], - pendingChainValidationStatus?: ValidateCheckpointResult, -): Promise { - return store.transactionAsync(async () => { - await store.addCheckpoints(checkpoints); - const allBlocks = checkpoints.flatMap((ch: PublishedCheckpoint) => ch.checkpoint.blocks); - - const opResults = await Promise.all([ - // Update the pending chain validation status if provided - pendingChainValidationStatus && store.setPendingChainValidationStatus(pendingChainValidationStatus), - // Add any logs emitted during the retrieved blocks - store.addLogs(allBlocks), - // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them - ...allBlocks.map(block => addBlockDataToDB(store, block)), - ]); - - return opResults.every(Boolean); - }); -} - -/** - * Unwinds checkpoints from the store with reverse contract extraction. - * Deletes ContractClassPublished, ContractInstancePublished, ContractInstanceUpdated data - * that was stored for the unwound checkpoints. - * - * @param store - The archiver data store. - * @param from - The checkpoint number to unwind from (must be the current tip). - * @param checkpointsToUnwind - The number of checkpoints to unwind. - * @returns True if the operation is successful. 
- */ -export async function unwindCheckpointsWithContractData( - store: KVArchiverDataStore, - from: CheckpointNumber, - checkpointsToUnwind: number, -): Promise { - if (checkpointsToUnwind <= 0) { - throw new Error(`Cannot unwind ${checkpointsToUnwind} blocks`); - } - - const last = await store.getSynchedCheckpointNumber(); - if (from != last) { - throw new Error(`Cannot unwind checkpoints from checkpoint ${from} when the last checkpoint is ${last}`); - } - - const blocks = []; - const lastCheckpointNumber = from + checkpointsToUnwind - 1; - for (let checkpointNumber = from; checkpointNumber <= lastCheckpointNumber; checkpointNumber++) { - const blocksForCheckpoint = await store.getBlocksForCheckpoint(checkpointNumber); - if (!blocksForCheckpoint) { - continue; - } - blocks.push(...blocksForCheckpoint); - } - - const opResults = await Promise.all([ - // Prune rolls back to the last proven block, which is by definition valid - store.setPendingChainValidationStatus({ valid: true }), - // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them - ...blocks.map(async block => { - const contractClassLogs = block.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs); - // ContractInstancePublished event logs are broadcast in privateLogs. 
- const privateLogs = block.body.txEffects.flatMap(txEffect => txEffect.privateLogs); - const publicLogs = block.body.txEffects.flatMap(txEffect => txEffect.publicLogs); - - return ( - await Promise.all([ - updatePublishedContractClasses(store, contractClassLogs, block.number, Operation.Delete), - updateDeployedContractInstances(store, privateLogs, block.number, Operation.Delete), - updateUpdatedContractInstances(store, publicLogs, block.header.globalVariables.timestamp, Operation.Delete), - ]) - ).every(Boolean); - }), - - store.deleteLogs(blocks), - store.unwindCheckpoints(from, checkpointsToUnwind), - ]); - - return opResults.every(Boolean); -} - -/** - * Extracts and stores contract data from a single block. - */ -async function addBlockDataToDB(store: KVArchiverDataStore, block: L2BlockNew): Promise { - const contractClassLogs = block.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs); - // ContractInstancePublished event logs are broadcast in privateLogs. - const privateLogs = block.body.txEffects.flatMap(txEffect => txEffect.privateLogs); - const publicLogs = block.body.txEffects.flatMap(txEffect => txEffect.publicLogs); - - return ( - await Promise.all([ - updatePublishedContractClasses(store, contractClassLogs, block.number, Operation.Store), - updateDeployedContractInstances(store, privateLogs, block.number, Operation.Store), - updateUpdatedContractInstances(store, publicLogs, block.header.globalVariables.timestamp, Operation.Store), - storeBroadcastedIndividualFunctions(store, contractClassLogs, block.number), - ]) - ).every(Boolean); -} - -/** - * Extracts and stores contract classes out of ContractClassPublished events emitted by the class registry contract. 
- */ -async function updatePublishedContractClasses( - store: KVArchiverDataStore, - allLogs: ContractClassLog[], - blockNum: BlockNumber, - operation: Operation, -): Promise { - const contractClassPublishedEvents = allLogs - .filter(log => ContractClassPublishedEvent.isContractClassPublishedEvent(log)) - .map(log => ContractClassPublishedEvent.fromLog(log)); - - const contractClasses = await Promise.all(contractClassPublishedEvents.map(e => e.toContractClassPublic())); - if (contractClasses.length > 0) { - contractClasses.forEach(c => log.verbose(`${Operation[operation]} contract class ${c.id.toString()}`)); - if (operation == Operation.Store) { - // TODO: Will probably want to create some worker threads to compute these bytecode commitments as they are expensive - const commitments = await Promise.all( - contractClasses.map(c => computePublicBytecodeCommitment(c.packedBytecode)), - ); - return await store.addContractClasses(contractClasses, commitments, blockNum); - } else if (operation == Operation.Delete) { - return await store.deleteContractClasses(contractClasses, blockNum); - } - } - return true; -} - -/** - * Extracts and stores contract instances out of ContractInstancePublished events emitted by the canonical deployer contract. 
- */ -async function updateDeployedContractInstances( - store: KVArchiverDataStore, - allLogs: PrivateLog[], - blockNum: BlockNumber, - operation: Operation, -): Promise { - const contractInstances = allLogs - .filter(log => ContractInstancePublishedEvent.isContractInstancePublishedEvent(log)) - .map(log => ContractInstancePublishedEvent.fromLog(log)) - .map(e => e.toContractInstance()); - if (contractInstances.length > 0) { - contractInstances.forEach(c => log.verbose(`${Operation[operation]} contract instance at ${c.address.toString()}`)); - if (operation == Operation.Store) { - return await store.addContractInstances(contractInstances, blockNum); - } else if (operation == Operation.Delete) { - return await store.deleteContractInstances(contractInstances, blockNum); - } - } - return true; -} - -/** - * Extracts and stores contract instance updates out of ContractInstanceUpdated events. - */ -async function updateUpdatedContractInstances( - store: KVArchiverDataStore, - allLogs: PublicLog[], - timestamp: UInt64, - operation: Operation, -): Promise { - const contractUpdates = allLogs - .filter(log => ContractInstanceUpdatedEvent.isContractInstanceUpdatedEvent(log)) - .map(log => ContractInstanceUpdatedEvent.fromLog(log)) - .map(e => e.toContractInstanceUpdate()); - - if (contractUpdates.length > 0) { - contractUpdates.forEach(c => - log.verbose(`${Operation[operation]} contract instance update at ${c.address.toString()}`), - ); - if (operation == Operation.Store) { - return await store.addContractInstanceUpdates(contractUpdates, timestamp); - } else if (operation == Operation.Delete) { - return await store.deleteContractInstanceUpdates(contractUpdates, timestamp); - } - } - return true; -} - -/** - * Stores the functions that were broadcasted individually. - * - * @dev Beware that there is not a delete variant of this, since they are added to contract classes - * and will be deleted as part of the class if needed. 
- */ -async function storeBroadcastedIndividualFunctions( - store: KVArchiverDataStore, - allLogs: ContractClassLog[], - _blockNum: BlockNumber, -): Promise { - // Filter out private and utility function broadcast events - const privateFnEvents = allLogs - .filter(log => PrivateFunctionBroadcastedEvent.isPrivateFunctionBroadcastedEvent(log)) - .map(log => PrivateFunctionBroadcastedEvent.fromLog(log)); - const utilityFnEvents = allLogs - .filter(log => UtilityFunctionBroadcastedEvent.isUtilityFunctionBroadcastedEvent(log)) - .map(log => UtilityFunctionBroadcastedEvent.fromLog(log)); - - // Group all events by contract class id - for (const [classIdString, classEvents] of Object.entries( - groupBy([...privateFnEvents, ...utilityFnEvents], e => e.contractClassId.toString()), - )) { - const contractClassId = Fr.fromHexString(classIdString); - const contractClass = await store.getContractClass(contractClassId); - if (!contractClass) { - log.warn(`Skipping broadcasted functions as contract class ${contractClassId.toString()} was not found`); - continue; - } - - // Split private and utility functions, and filter out invalid ones - const allFns = classEvents.map(e => e.toFunctionWithMembershipProof()); - const privateFns = allFns.filter( - (fn): fn is ExecutablePrivateFunctionWithMembershipProof => 'utilityFunctionsTreeRoot' in fn, - ); - const utilityFns = allFns.filter( - (fn): fn is UtilityFunctionWithMembershipProof => 'privateFunctionsArtifactTreeRoot' in fn, - ); - - const privateFunctionsWithValidity = await Promise.all( - privateFns.map(async fn => ({ fn, valid: await isValidPrivateFunctionMembershipProof(fn, contractClass) })), - ); - const validPrivateFns = privateFunctionsWithValidity.filter(({ valid }) => valid).map(({ fn }) => fn); - const utilityFunctionsWithValidity = await Promise.all( - utilityFns.map(async fn => ({ - fn, - valid: await isValidUtilityFunctionMembershipProof(fn, contractClass), - })), - ); - const validUtilityFns = 
utilityFunctionsWithValidity.filter(({ valid }) => valid).map(({ fn }) => fn); - const validFnCount = validPrivateFns.length + validUtilityFns.length; - if (validFnCount !== allFns.length) { - log.warn(`Skipping ${allFns.length - validFnCount} invalid functions`); - } - - // Store the functions in the contract class in a single operation - if (validFnCount > 0) { - log.verbose(`Storing ${validFnCount} functions for contract class ${contractClassId.toString()}`); - } - return await store.addFunctions(contractClassId, validPrivateFns, validUtilityFns); - } - return true; -} diff --git a/yarn-project/archiver/src/archiver/index.ts b/yarn-project/archiver/src/archiver/index.ts deleted file mode 100644 index 8053b725b55b..000000000000 --- a/yarn-project/archiver/src/archiver/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -export * from './archiver.js'; -export * from './archive_source_base.js'; -export * from './archiver_store_updates.js'; -export * from './config.js'; -export { type L1PublishedData } from './structs/published.js'; -export { KVArchiverDataStore, ARCHIVER_DB_VERSION } from './kv_archiver_store/kv_archiver_store.js'; -export { ContractInstanceStore } from './kv_archiver_store/contract_instance_store.js'; diff --git a/yarn-project/archiver/src/archiver/config.ts b/yarn-project/archiver/src/config.ts similarity index 87% rename from yarn-project/archiver/src/archiver/config.ts rename to yarn-project/archiver/src/config.ts index 66cddc373209..e3ed77eb71dc 100644 --- a/yarn-project/archiver/src/archiver/config.ts +++ b/yarn-project/archiver/src/config.ts @@ -82,3 +82,14 @@ export const archiverConfigMappings: ConfigMappingsType = { export function getArchiverConfigFromEnv(): ArchiverConfig { return getConfigFromMappings(archiverConfigMappings); } + +/** Extracts the archiver-specific configuration from the full ArchiverConfig */ +export function mapArchiverConfig(config: Partial) { + return { + pollingIntervalMs: config.archiverPollingIntervalMS, + batchSize: 
config.archiverBatchSize, + skipValidateCheckpointAttestations: config.skipValidateCheckpointAttestations, + maxAllowedEthClientDriftSeconds: config.maxAllowedEthClientDriftSeconds, + ethereumAllowNoDebugHosts: config.ethereumAllowNoDebugHosts, + }; +} diff --git a/yarn-project/archiver/src/archiver/errors.ts b/yarn-project/archiver/src/errors.ts similarity index 100% rename from yarn-project/archiver/src/archiver/errors.ts rename to yarn-project/archiver/src/errors.ts diff --git a/yarn-project/archiver/src/factory.ts b/yarn-project/archiver/src/factory.ts index 18dac6c6796f..3528b23465d8 100644 --- a/yarn-project/archiver/src/factory.ts +++ b/yarn-project/archiver/src/factory.ts @@ -1,15 +1,30 @@ +import { EpochCache } from '@aztec/epoch-cache'; +import { createEthereumChain } from '@aztec/ethereum/chain'; +import { InboxContract, RollupContract } from '@aztec/ethereum/contracts'; +import type { ViemPublicDebugClient } from '@aztec/ethereum/types'; import { BlockNumber } from '@aztec/foundation/branded-types'; +import { Buffer32 } from '@aztec/foundation/buffer'; +import { merge } from '@aztec/foundation/collection'; +import { Fr } from '@aztec/foundation/curves/bn254'; import { createLogger } from '@aztec/foundation/log'; +import { DateProvider } from '@aztec/foundation/timer'; import type { DataStoreConfig } from '@aztec/kv-store/config'; import { createStore } from '@aztec/kv-store/lmdb-v2'; import { protocolContractNames } from '@aztec/protocol-contracts'; import { BundledProtocolContractsProvider } from '@aztec/protocol-contracts/providers/bundle'; import { FunctionType, decodeFunctionSignature } from '@aztec/stdlib/abi'; +import type { ArchiverEmitter } from '@aztec/stdlib/block'; import { type ContractClassPublic, computePublicBytecodeCommitment } from '@aztec/stdlib/contract'; +import { getTelemetryClient } from '@aztec/telemetry-client'; -import { Archiver, type ArchiverDeps } from './archiver/archiver.js'; -import type { ArchiverConfig } from 
'./archiver/config.js'; -import { ARCHIVER_DB_VERSION, KVArchiverDataStore } from './archiver/kv_archiver_store/kv_archiver_store.js'; +import { EventEmitter } from 'events'; +import { createPublicClient, fallback, http } from 'viem'; + +import { Archiver, type ArchiverDeps } from './archiver.js'; +import { type ArchiverConfig, mapArchiverConfig } from './config.js'; +import { ArchiverInstrumentation } from './modules/instrumentation.js'; +import { ArchiverL1Synchronizer } from './modules/l1_synchronizer.js'; +import { ARCHIVER_DB_VERSION, KVArchiverDataStore } from './store/kv_archiver_store.js'; export const ARCHIVER_STORE_NAME = 'archiver'; @@ -28,9 +43,8 @@ export async function createArchiverStore( /** * Creates a local archiver. * @param config - The archiver configuration. - * @param blobClient - The blob client client. + * @param deps - The archiver dependencies (blobClient, epochCache, dateProvider, telemetry). * @param opts - The options. - * @param telemetry - The telemetry client. * @returns The local archiver. */ export async function createArchiver( @@ -40,7 +54,105 @@ export async function createArchiver( ): Promise { const archiverStore = await createArchiverStore(config); await registerProtocolContracts(archiverStore); - return Archiver.createAndSync(config, archiverStore, deps, opts.blockUntilSync); + + // Create Ethereum clients + const chain = createEthereumChain(config.l1RpcUrls, config.l1ChainId); + const publicClient = createPublicClient({ + chain: chain.chainInfo, + transport: fallback(config.l1RpcUrls.map(url => http(url, { batch: false }))), + pollingInterval: config.viemPollingIntervalMS, + }); + + // Create debug client using debug RPC URLs if available, otherwise fall back to regular RPC URLs + const debugRpcUrls = config.l1DebugRpcUrls.length > 0 ? 
config.l1DebugRpcUrls : config.l1RpcUrls; + const debugClient = createPublicClient({ + chain: chain.chainInfo, + transport: fallback(debugRpcUrls.map(url => http(url, { batch: false }))), + pollingInterval: config.viemPollingIntervalMS, + }) as ViemPublicDebugClient; + + // Create L1 contract instances + const rollup = new RollupContract(publicClient, config.l1Contracts.rollupAddress); + const inbox = new InboxContract(publicClient, config.l1Contracts.inboxAddress); + + // Fetch L1 constants from rollup contract + const [l1StartBlock, l1GenesisTime, proofSubmissionEpochs, genesisArchiveRoot, slashingProposerAddress] = + await Promise.all([ + rollup.getL1StartBlock(), + rollup.getL1GenesisTime(), + rollup.getProofSubmissionEpochs(), + rollup.getGenesisArchiveTreeRoot(), + rollup.getSlashingProposerAddress(), + ] as const); + + const l1StartBlockHash = await publicClient + .getBlock({ blockNumber: l1StartBlock, includeTransactions: false }) + .then(block => Buffer32.fromString(block.hash)); + + const { aztecEpochDuration: epochDuration, aztecSlotDuration: slotDuration, ethereumSlotDuration } = config; + + const l1Constants = { + l1StartBlockHash, + l1StartBlock, + l1GenesisTime, + epochDuration, + slotDuration, + ethereumSlotDuration, + proofSubmissionEpochs: Number(proofSubmissionEpochs), + genesisArchiveRoot: Fr.fromString(genesisArchiveRoot.toString()), + }; + + const archiverConfig = merge( + { + pollingIntervalMs: 10_000, + batchSize: 100, + maxAllowedEthClientDriftSeconds: 300, + ethereumAllowNoDebugHosts: false, + }, + mapArchiverConfig(config), + ); + + const epochCache = deps.epochCache ?? (await EpochCache.create(config.l1Contracts.rollupAddress, config, deps)); + const telemetry = deps.telemetry ?? 
getTelemetryClient(); + const instrumentation = await ArchiverInstrumentation.new(telemetry, () => archiverStore.estimateSize()); + + // Create the event emitter that will be shared by archiver and synchronizer + const events = new EventEmitter() as ArchiverEmitter; + + // Create the L1 synchronizer + const synchronizer = new ArchiverL1Synchronizer( + publicClient, + debugClient, + rollup, + inbox, + { ...config.l1Contracts, slashingProposerAddress }, + archiverStore, + archiverConfig, + deps.blobClient, + epochCache, + deps.dateProvider ?? new DateProvider(), + instrumentation, + l1Constants, + events, + instrumentation.tracer, + ); + + const archiver = new Archiver( + publicClient, + debugClient, + rollup, + { ...config.l1Contracts, slashingProposerAddress }, + archiverStore, + archiverConfig, + deps.blobClient, + instrumentation, + l1Constants, + synchronizer, + events, + ); + + await archiver.start(opts.blockUntilSync); + return archiver; } async function registerProtocolContracts(store: KVArchiverDataStore) { diff --git a/yarn-project/archiver/src/index.ts b/yarn-project/archiver/src/index.ts index 23b70eae8933..224884764f17 100644 --- a/yarn-project/archiver/src/index.ts +++ b/yarn-project/archiver/src/index.ts @@ -1,5 +1,12 @@ -export * from './archiver/index.js'; export * from './factory.js'; -export * from './rpc/index.js'; +export * from './interfaces.js'; +export * from './archiver.js'; +export * from './modules/data_source_base.js'; +export * from './modules/data_store_updater.js'; +export * from './config.js'; -export { retrieveCheckpointsFromRollup, retrieveL2ProofVerifiedEvents } from './archiver/l1/data_retrieval.js'; +export { type L1PublishedData } from './structs/published.js'; +export { KVArchiverDataStore, ARCHIVER_DB_VERSION } from './store/kv_archiver_store.js'; +export { ContractInstanceStore } from './store/contract_instance_store.js'; + +export { retrieveCheckpointsFromRollup, retrieveL2ProofVerifiedEvents } from './l1/data_retrieval.js'; 
diff --git a/yarn-project/archiver/src/interfaces.ts b/yarn-project/archiver/src/interfaces.ts new file mode 100644 index 000000000000..f9e32ed827a6 --- /dev/null +++ b/yarn-project/archiver/src/interfaces.ts @@ -0,0 +1,9 @@ +import type { L2BlockSource } from '@aztec/stdlib/block'; +import type { ContractDataSource } from '@aztec/stdlib/contract'; +import type { L2LogsSource } from '@aztec/stdlib/interfaces/server'; +import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; + +/** + * Helper interface to combine all sources this archiver implementation provides. + */ +export type ArchiverDataSource = L2BlockSource & L2LogsSource & ContractDataSource & L1ToL2MessageSource; diff --git a/yarn-project/archiver/src/archiver/l1/README.md b/yarn-project/archiver/src/l1/README.md similarity index 100% rename from yarn-project/archiver/src/archiver/l1/README.md rename to yarn-project/archiver/src/l1/README.md diff --git a/yarn-project/archiver/src/archiver/l1/bin/retrieve-calldata.ts b/yarn-project/archiver/src/l1/bin/retrieve-calldata.ts similarity index 100% rename from yarn-project/archiver/src/archiver/l1/bin/retrieve-calldata.ts rename to yarn-project/archiver/src/l1/bin/retrieve-calldata.ts diff --git a/yarn-project/archiver/src/archiver/l1/calldata_retriever.test.ts b/yarn-project/archiver/src/l1/calldata_retriever.test.ts similarity index 99% rename from yarn-project/archiver/src/archiver/l1/calldata_retriever.test.ts rename to yarn-project/archiver/src/l1/calldata_retriever.test.ts index 19f2d183c881..9dd7f5e7187a 100644 --- a/yarn-project/archiver/src/archiver/l1/calldata_retriever.test.ts +++ b/yarn-project/archiver/src/l1/calldata_retriever.test.ts @@ -25,7 +25,7 @@ import { toFunctionSelector, } from 'viem'; -import type { ArchiverInstrumentation } from '../instrumentation.js'; +import type { ArchiverInstrumentation } from '../modules/instrumentation.js'; import { CalldataRetriever } from './calldata_retriever.js'; import { 
EIP1967_IMPLEMENTATION_SLOT, diff --git a/yarn-project/archiver/src/archiver/l1/calldata_retriever.ts b/yarn-project/archiver/src/l1/calldata_retriever.ts similarity index 99% rename from yarn-project/archiver/src/archiver/l1/calldata_retriever.ts rename to yarn-project/archiver/src/l1/calldata_retriever.ts index c8d0957509b8..84f227b9617b 100644 --- a/yarn-project/archiver/src/archiver/l1/calldata_retriever.ts +++ b/yarn-project/archiver/src/l1/calldata_retriever.ts @@ -28,7 +28,7 @@ import { toFunctionSelector, } from 'viem'; -import type { ArchiverInstrumentation } from '../instrumentation.js'; +import type { ArchiverInstrumentation } from '../modules/instrumentation.js'; import { getSuccessfulCallsFromDebug } from './debug_tx.js'; import { getCallFromSpireProposer } from './spire_proposer.js'; import { getSuccessfulCallsFromTrace } from './trace_tx.js'; diff --git a/yarn-project/archiver/src/archiver/l1/data_retrieval.ts b/yarn-project/archiver/src/l1/data_retrieval.ts similarity index 99% rename from yarn-project/archiver/src/archiver/l1/data_retrieval.ts rename to yarn-project/archiver/src/l1/data_retrieval.ts index 55db67f6dc2e..94440d834cc3 100644 --- a/yarn-project/archiver/src/archiver/l1/data_retrieval.ts +++ b/yarn-project/archiver/src/l1/data_retrieval.ts @@ -30,7 +30,7 @@ import { BlockHeader, GlobalVariables, PartialStateReference, StateReference } f import { type Hex, decodeFunctionData, getAbiItem, hexToBytes } from 'viem'; import { NoBlobBodiesFoundError } from '../errors.js'; -import type { ArchiverInstrumentation } from '../instrumentation.js'; +import type { ArchiverInstrumentation } from '../modules/instrumentation.js'; import type { DataRetrieval } from '../structs/data_retrieval.js'; import type { InboxMessage } from '../structs/inbox_message.js'; import { CalldataRetriever } from './calldata_retriever.js'; diff --git a/yarn-project/archiver/src/archiver/l1/debug_tx.test.ts b/yarn-project/archiver/src/l1/debug_tx.test.ts similarity index 97% 
rename from yarn-project/archiver/src/archiver/l1/debug_tx.test.ts rename to yarn-project/archiver/src/l1/debug_tx.test.ts index 0db0c8c5c367..6bba93350e74 100644 --- a/yarn-project/archiver/src/archiver/l1/debug_tx.test.ts +++ b/yarn-project/archiver/src/l1/debug_tx.test.ts @@ -4,8 +4,8 @@ import { EthAddress } from '@aztec/foundation/eth-address'; import { type MockProxy, mock } from 'jest-mock-extended'; import type { Hex } from 'viem'; -import debugTraceMultipleProposeFixture from '../../test/fixtures/debug_traceTransaction-multiplePropose.json' with { type: 'json' }; -import debugTraceFixture from '../../test/fixtures/debug_traceTransaction-proxied.json' with { type: 'json' }; +import debugTraceMultipleProposeFixture from '../test/fixtures/debug_traceTransaction-multiplePropose.json' with { type: 'json' }; +import debugTraceFixture from '../test/fixtures/debug_traceTransaction-proxied.json' with { type: 'json' }; import { getSuccessfulCallsFromDebug } from './debug_tx.js'; describe('getSuccessfulCallsFromDebug', () => { diff --git a/yarn-project/archiver/src/archiver/l1/debug_tx.ts b/yarn-project/archiver/src/l1/debug_tx.ts similarity index 100% rename from yarn-project/archiver/src/archiver/l1/debug_tx.ts rename to yarn-project/archiver/src/l1/debug_tx.ts diff --git a/yarn-project/archiver/src/archiver/l1/spire_proposer.test.ts b/yarn-project/archiver/src/l1/spire_proposer.test.ts similarity index 100% rename from yarn-project/archiver/src/archiver/l1/spire_proposer.test.ts rename to yarn-project/archiver/src/l1/spire_proposer.test.ts diff --git a/yarn-project/archiver/src/archiver/l1/spire_proposer.ts b/yarn-project/archiver/src/l1/spire_proposer.ts similarity index 100% rename from yarn-project/archiver/src/archiver/l1/spire_proposer.ts rename to yarn-project/archiver/src/l1/spire_proposer.ts diff --git a/yarn-project/archiver/src/archiver/l1/trace_tx.test.ts b/yarn-project/archiver/src/l1/trace_tx.test.ts similarity index 96% rename from 
yarn-project/archiver/src/archiver/l1/trace_tx.test.ts rename to yarn-project/archiver/src/l1/trace_tx.test.ts index d13c26da3d57..ddedf955846d 100644 --- a/yarn-project/archiver/src/archiver/l1/trace_tx.test.ts +++ b/yarn-project/archiver/src/l1/trace_tx.test.ts @@ -4,9 +4,9 @@ import { EthAddress } from '@aztec/foundation/eth-address'; import { jest } from '@jest/globals'; import type { Hex } from 'viem'; -import traceTransactionMultipleProposeFixture from '../../test/fixtures/trace_transaction-multiplePropose.json' with { type: 'json' }; -import traceTransactionFixture from '../../test/fixtures/trace_transaction-proxied.json' with { type: 'json' }; -import traceTransactionRevertFixture from '../../test/fixtures/trace_transaction-randomRevert.json' with { type: 'json' }; +import traceTransactionMultipleProposeFixture from '../test/fixtures/trace_transaction-multiplePropose.json' with { type: 'json' }; +import traceTransactionFixture from '../test/fixtures/trace_transaction-proxied.json' with { type: 'json' }; +import traceTransactionRevertFixture from '../test/fixtures/trace_transaction-randomRevert.json' with { type: 'json' }; import { getSuccessfulCallsFromTrace } from './trace_tx.js'; describe('getSuccessfulCallsFromTrace', () => { diff --git a/yarn-project/archiver/src/archiver/l1/trace_tx.ts b/yarn-project/archiver/src/l1/trace_tx.ts similarity index 100% rename from yarn-project/archiver/src/archiver/l1/trace_tx.ts rename to yarn-project/archiver/src/l1/trace_tx.ts diff --git a/yarn-project/archiver/src/archiver/l1/types.ts b/yarn-project/archiver/src/l1/types.ts similarity index 100% rename from yarn-project/archiver/src/archiver/l1/types.ts rename to yarn-project/archiver/src/l1/types.ts diff --git a/yarn-project/archiver/src/archiver/l1/validate_trace.ts b/yarn-project/archiver/src/l1/validate_trace.ts similarity index 100% rename from yarn-project/archiver/src/archiver/l1/validate_trace.ts rename to yarn-project/archiver/src/l1/validate_trace.ts diff 
--git a/yarn-project/archiver/src/archiver/archive_source_base.ts b/yarn-project/archiver/src/modules/data_source_base.ts similarity index 67% rename from yarn-project/archiver/src/archiver/archive_source_base.ts rename to yarn-project/archiver/src/modules/data_source_base.ts index 131bbe719d3a..75227cc025c8 100644 --- a/yarn-project/archiver/src/archiver/archive_source_base.ts +++ b/yarn-project/archiver/src/modules/data_source_base.ts @@ -14,33 +14,32 @@ import { } from '@aztec/stdlib/block'; import { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import type { ContractClassPublic, ContractDataSource, ContractInstanceWithAddress } from '@aztec/stdlib/contract'; -import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; +import { type L1RollupConstants, getSlotRangeForEpoch } from '@aztec/stdlib/epoch-helpers'; import type { GetContractClassLogsResponse, GetPublicLogsResponse } from '@aztec/stdlib/interfaces/client'; import type { L2LogsSource } from '@aztec/stdlib/interfaces/server'; import type { LogFilter, SiloedTag, Tag, TxScopedL2Log } from '@aztec/stdlib/logs'; import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; +import type { CheckpointHeader } from '@aztec/stdlib/rollup'; import type { BlockHeader, IndexedTxEffect, TxHash, TxReceipt } from '@aztec/stdlib/tx'; import type { UInt64 } from '@aztec/stdlib/types'; -import type { ArchiveSource } from './archiver.js'; -import type { KVArchiverDataStore } from './kv_archiver_store/kv_archiver_store.js'; +import type { ArchiverDataSource } from '../interfaces.js'; +import type { CheckpointData } from '../store/block_store.js'; +import type { KVArchiverDataStore } from '../store/kv_archiver_store.js'; import type { ValidateCheckpointResult } from './validation.js'; /** - * Abstract base class implementing ArchiveSource using a KVArchiverDataStore. + * Abstract base class implementing ArchiverDataSource using a KVArchiverDataStore. 
* Provides implementations for all store-delegating methods and declares abstract methods * for L1-dependent functionality that subclasses must implement. */ -export abstract class ArchiveSourceBase - implements ArchiveSource, L2LogsSource, ContractDataSource, L1ToL2MessageSource +export abstract class ArchiverDataSourceBase + implements ArchiverDataSource, L2LogsSource, ContractDataSource, L1ToL2MessageSource { - protected readonly store: KVArchiverDataStore; - - constructor(store: KVArchiverDataStore) { - this.store = store; - } - - // Abstract methods that require L1 dependencies + constructor( + protected readonly store: KVArchiverDataStore, + protected readonly l1Constants?: L1RollupConstants, + ) {} abstract getRollupAddress(): Promise; @@ -58,17 +57,21 @@ export abstract class ArchiveSourceBase abstract getL2EpochNumber(): Promise; - abstract getCheckpointsForEpoch(epochNumber: EpochNumber): Promise; - - abstract getBlocksForEpoch(epochNumber: EpochNumber): Promise; - - abstract getBlockHeadersForEpoch(epochNumber: EpochNumber): Promise; - abstract isEpochComplete(epochNumber: EpochNumber): Promise; abstract syncImmediate(): Promise; - // Store-delegating methods + public getCheckpointNumber(): Promise { + return this.store.getSynchedCheckpointNumber(); + } + + public getSynchedCheckpointNumber(): Promise { + return this.store.getSynchedCheckpointNumber(); + } + + public getProvenCheckpointNumber(): Promise { + return this.store.getProvenCheckpointNumber(); + } public getBlockNumber(): Promise { return this.store.getLatestBlockNumber(); @@ -91,6 +94,32 @@ export abstract class ArchiveSourceBase return this.store.getCheckpointedBlock(number); } + public getCheckpointedBlockNumber(): Promise { + return this.store.getCheckpointedL2BlockNumber(); + } + + public async getCheckpointHeader(number: CheckpointNumber | 'latest'): Promise { + if (number === 'latest') { + number = await this.store.getSynchedCheckpointNumber(); + } + if (number === 0) { + return 
undefined; + } + const checkpoint = await this.store.getCheckpointData(number); + if (!checkpoint) { + return undefined; + } + return checkpoint.header; + } + + public async getLastBlockNumberInCheckpoint(checkpointNumber: CheckpointNumber): Promise { + const checkpointData = await this.store.getCheckpointData(checkpointNumber); + if (!checkpointData) { + return undefined; + } + return BlockNumber(checkpointData.startBlock + checkpointData.numBlocks - 1); + } + public async getCheckpointedBlocks( from: BlockNumber, limit: number, @@ -151,8 +180,6 @@ export abstract class ArchiveSourceBase return blocks; } - // L2LogsSource methods - public getPrivateLogsByTags(tags: SiloedTag[]): Promise { return this.store.getPrivateLogsByTags(tags); } @@ -169,8 +196,6 @@ export abstract class ArchiveSourceBase return this.store.getContractClassLogs(filter); } - // ContractDataSource methods - public getContractClass(id: Fr): Promise { return this.store.getContractClass(id); } @@ -207,8 +232,6 @@ export abstract class ArchiveSourceBase return this.store.registerContractFunctionSignatures(signatures); } - // L1ToL2MessageSource methods - public getL1ToL2Messages(checkpointNumber: CheckpointNumber): Promise { return this.store.getL1ToL2Messages(checkpointNumber); } @@ -217,8 +240,6 @@ export abstract class ArchiveSourceBase return this.store.getL1ToL2MessageIndex(l1ToL2Message); } - // Published checkpoint methods - public async getPublishedCheckpoints( checkpointNumber: CheckpointNumber, limit: number, @@ -248,6 +269,87 @@ export abstract class ArchiveSourceBase return fullCheckpoints; } + public async getBlocksForEpoch(epochNumber: EpochNumber): Promise { + if (!this.l1Constants) { + throw new Error('L1 constants not set'); + } + + const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1Constants); + const blocks: L2Block[] = []; + + // Walk the list of checkpoints backwards and filter by slots matching the requested epoch. 
+ // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here. + let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber()); + const slot = (b: CheckpointData) => b.header.slotNumber; + while (checkpoint && slot(checkpoint) >= start) { + if (slot(checkpoint) <= end) { + // push the blocks on backwards + const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1; + for (let i = endBlock; i >= checkpoint.startBlock; i--) { + const block = await this.getBlock(BlockNumber(i)); + if (block) { + blocks.push(block); + } + } + } + checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1)); + } + + return blocks.reverse(); + } + + public async getBlockHeadersForEpoch(epochNumber: EpochNumber): Promise { + if (!this.l1Constants) { + throw new Error('L1 constants not set'); + } + + const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1Constants); + const blocks: BlockHeader[] = []; + + // Walk the list of checkpoints backwards and filter by slots matching the requested epoch. + // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here. 
+ let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber()); + const slot = (b: CheckpointData) => b.header.slotNumber; + while (checkpoint && slot(checkpoint) >= start) { + if (slot(checkpoint) <= end) { + // push the blocks on backwards + const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1; + for (let i = endBlock; i >= checkpoint.startBlock; i--) { + const block = await this.getBlockHeader(BlockNumber(i)); + if (block) { + blocks.push(block); + } + } + } + checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1)); + } + return blocks.reverse(); + } + + public async getCheckpointsForEpoch(epochNumber: EpochNumber): Promise { + if (!this.l1Constants) { + throw new Error('L1 constants not set'); + } + + const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1Constants); + const checkpoints: Checkpoint[] = []; + + // Walk the list of checkpoints backwards and filter by slots matching the requested epoch. + // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here. 
+ let checkpointData = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber()); + const slot = (b: CheckpointData) => b.header.slotNumber; + while (checkpointData && slot(checkpointData) >= start) { + if (slot(checkpointData) <= end) { + // push the checkpoints on backwards + const [checkpoint] = await this.getPublishedCheckpoints(checkpointData.checkpointNumber, 1); + checkpoints.push(checkpoint.checkpoint); + } + checkpointData = await this.store.getCheckpointData(CheckpointNumber(checkpointData.checkpointNumber - 1)); + } + + return checkpoints.reverse(); + } + public async getPublishedBlocks(from: BlockNumber, limit: number, proven?: boolean): Promise { const checkpoints = await this.store.getRangeOfCheckpoints(CheckpointNumber(from), limit); const provenCheckpointNumber = await this.store.getProvenCheckpointNumber(); @@ -280,8 +382,6 @@ export abstract class ArchiveSourceBase return olbBlocks; } - // Legacy APIs - public async getBlock(number: BlockNumber): Promise { // If the number provided is -ve, then return the latest block. 
if (number < 0) { diff --git a/yarn-project/archiver/src/modules/data_store_updater.ts b/yarn-project/archiver/src/modules/data_store_updater.ts new file mode 100644 index 000000000000..bbc4b91440ab --- /dev/null +++ b/yarn-project/archiver/src/modules/data_store_updater.ts @@ -0,0 +1,318 @@ +import type { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; +import { Fr } from '@aztec/foundation/curves/bn254'; +import { createLogger } from '@aztec/foundation/log'; +import { + ContractClassPublishedEvent, + PrivateFunctionBroadcastedEvent, + UtilityFunctionBroadcastedEvent, +} from '@aztec/protocol-contracts/class-registry'; +import { + ContractInstancePublishedEvent, + ContractInstanceUpdatedEvent, +} from '@aztec/protocol-contracts/instance-registry'; +import type { L2BlockNew, ValidateCheckpointResult } from '@aztec/stdlib/block'; +import type { PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; +import { + type ExecutablePrivateFunctionWithMembershipProof, + type UtilityFunctionWithMembershipProof, + computePublicBytecodeCommitment, + isValidPrivateFunctionMembershipProof, + isValidUtilityFunctionMembershipProof, +} from '@aztec/stdlib/contract'; +import type { ContractClassLog, PrivateLog, PublicLog } from '@aztec/stdlib/logs'; +import type { UInt64 } from '@aztec/stdlib/types'; + +import groupBy from 'lodash.groupby'; + +import type { KVArchiverDataStore } from '../store/kv_archiver_store.js'; + +/** Operation type for contract data updates. */ +enum Operation { + Store, + Delete, +} + +/** Archiver helper module to handle updates to the data store. */ +export class ArchiverDataStoreUpdater { + private readonly log = createLogger('archiver:store_updater'); + + constructor(private store: KVArchiverDataStore) {} + + /** + * Adds blocks to the store with contract class/instance extraction from logs. 
+ * Extracts ContractClassPublished, ContractInstancePublished, ContractInstanceUpdated events, + * and individually broadcasted functions from the block logs. + * + * @param blocks - The L2 blocks to add. + * @param pendingChainValidationStatus - Optional validation status to set. + * @returns True if the operation is successful. + */ + public addBlocksWithContractData( + blocks: L2BlockNew[], + pendingChainValidationStatus?: ValidateCheckpointResult, + ): Promise { + return this.store.transactionAsync(async () => { + await this.store.addBlocks(blocks); + + const opResults = await Promise.all([ + // Update the pending chain validation status if provided + pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus), + // Add any logs emitted during the retrieved blocks + this.store.addLogs(blocks), + // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them + ...blocks.map(block => this.addBlockDataToDB(block)), + ]); + + return opResults.every(Boolean); + }); + } + + /** + * Adds checkpoints to the store with contract class/instance extraction from logs. + * Extracts ContractClassPublished, ContractInstancePublished, ContractInstanceUpdated events, + * and individually broadcasted functions from the checkpoint block logs. + * + * @param checkpoints - The published checkpoints to add. + * @param pendingChainValidationStatus - Optional validation status to set. + * @returns True if the operation is successful. 
+ */ + public addCheckpointsWithContractData( + checkpoints: PublishedCheckpoint[], + pendingChainValidationStatus?: ValidateCheckpointResult, + ): Promise { + return this.store.transactionAsync(async () => { + await this.store.addCheckpoints(checkpoints); + const allBlocks = checkpoints.flatMap((ch: PublishedCheckpoint) => ch.checkpoint.blocks); + + const opResults = await Promise.all([ + // Update the pending chain validation status if provided + pendingChainValidationStatus && this.store.setPendingChainValidationStatus(pendingChainValidationStatus), + // Add any logs emitted during the retrieved blocks + this.store.addLogs(allBlocks), + // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them + ...allBlocks.map(block => this.addBlockDataToDB(block)), + ]); + + return opResults.every(Boolean); + }); + } + + /** + * Unwinds checkpoints from the store with reverse contract extraction. + * Deletes ContractClassPublished, ContractInstancePublished, ContractInstanceUpdated data + * that was stored for the unwound checkpoints. + * + * @param from - The checkpoint number to unwind from (must be the current tip). + * @param checkpointsToUnwind - The number of checkpoints to unwind. + * @returns True if the operation is successful. 
+ */ + public async unwindCheckpointsWithContractData( + from: CheckpointNumber, + checkpointsToUnwind: number, + ): Promise { + if (checkpointsToUnwind <= 0) { + throw new Error(`Cannot unwind ${checkpointsToUnwind} blocks`); + } + + const last = await this.store.getSynchedCheckpointNumber(); + if (from != last) { + throw new Error(`Cannot unwind checkpoints from checkpoint ${from} when the last checkpoint is ${last}`); + } + + const blocks = []; + const lastCheckpointNumber = from + checkpointsToUnwind - 1; + for (let checkpointNumber = from; checkpointNumber <= lastCheckpointNumber; checkpointNumber++) { + const blocksForCheckpoint = await this.store.getBlocksForCheckpoint(checkpointNumber); + if (!blocksForCheckpoint) { + continue; + } + blocks.push(...blocksForCheckpoint); + } + + const opResults = await Promise.all([ + // Prune rolls back to the last proven block, which is by definition valid + this.store.setPendingChainValidationStatus({ valid: true }), + // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them + ...blocks.map(async block => { + const contractClassLogs = block.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs); + // ContractInstancePublished event logs are broadcast in privateLogs. 
+ const privateLogs = block.body.txEffects.flatMap(txEffect => txEffect.privateLogs); + const publicLogs = block.body.txEffects.flatMap(txEffect => txEffect.publicLogs); + + return ( + await Promise.all([ + this.updatePublishedContractClasses(contractClassLogs, block.number, Operation.Delete), + this.updateDeployedContractInstances(privateLogs, block.number, Operation.Delete), + this.updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, Operation.Delete), + ]) + ).every(Boolean); + }), + + this.store.deleteLogs(blocks), + this.store.unwindCheckpoints(from, checkpointsToUnwind), + ]); + + return opResults.every(Boolean); + } + + /** + * Extracts and stores contract data from a single block. + */ + private async addBlockDataToDB(block: L2BlockNew): Promise { + const contractClassLogs = block.body.txEffects.flatMap(txEffect => txEffect.contractClassLogs); + // ContractInstancePublished event logs are broadcast in privateLogs. + const privateLogs = block.body.txEffects.flatMap(txEffect => txEffect.privateLogs); + const publicLogs = block.body.txEffects.flatMap(txEffect => txEffect.publicLogs); + + return ( + await Promise.all([ + this.updatePublishedContractClasses(contractClassLogs, block.number, Operation.Store), + this.updateDeployedContractInstances(privateLogs, block.number, Operation.Store), + this.updateUpdatedContractInstances(publicLogs, block.header.globalVariables.timestamp, Operation.Store), + this.storeBroadcastedIndividualFunctions(contractClassLogs, block.number), + ]) + ).every(Boolean); + } + + /** + * Extracts and stores contract classes out of ContractClassPublished events emitted by the class registry contract. 
+ */ + private async updatePublishedContractClasses( + allLogs: ContractClassLog[], + blockNum: BlockNumber, + operation: Operation, + ): Promise { + const contractClassPublishedEvents = allLogs + .filter(log => ContractClassPublishedEvent.isContractClassPublishedEvent(log)) + .map(log => ContractClassPublishedEvent.fromLog(log)); + + const contractClasses = await Promise.all(contractClassPublishedEvents.map(e => e.toContractClassPublic())); + if (contractClasses.length > 0) { + contractClasses.forEach(c => this.log.verbose(`${Operation[operation]} contract class ${c.id.toString()}`)); + if (operation == Operation.Store) { + // TODO: Will probably want to create some worker threads to compute these bytecode commitments as they are expensive + const commitments = await Promise.all( + contractClasses.map(c => computePublicBytecodeCommitment(c.packedBytecode)), + ); + return await this.store.addContractClasses(contractClasses, commitments, blockNum); + } else if (operation == Operation.Delete) { + return await this.store.deleteContractClasses(contractClasses, blockNum); + } + } + return true; + } + + /** + * Extracts and stores contract instances out of ContractInstancePublished events emitted by the canonical deployer contract. 
+ */ + private async updateDeployedContractInstances( + allLogs: PrivateLog[], + blockNum: BlockNumber, + operation: Operation, + ): Promise { + const contractInstances = allLogs + .filter(log => ContractInstancePublishedEvent.isContractInstancePublishedEvent(log)) + .map(log => ContractInstancePublishedEvent.fromLog(log)) + .map(e => e.toContractInstance()); + if (contractInstances.length > 0) { + contractInstances.forEach(c => + this.log.verbose(`${Operation[operation]} contract instance at ${c.address.toString()}`), + ); + if (operation == Operation.Store) { + return await this.store.addContractInstances(contractInstances, blockNum); + } else if (operation == Operation.Delete) { + return await this.store.deleteContractInstances(contractInstances, blockNum); + } + } + return true; + } + + /** + * Extracts and stores contract instance updates out of ContractInstanceUpdated events. + */ + private async updateUpdatedContractInstances( + allLogs: PublicLog[], + timestamp: UInt64, + operation: Operation, + ): Promise { + const contractUpdates = allLogs + .filter(log => ContractInstanceUpdatedEvent.isContractInstanceUpdatedEvent(log)) + .map(log => ContractInstanceUpdatedEvent.fromLog(log)) + .map(e => e.toContractInstanceUpdate()); + + if (contractUpdates.length > 0) { + contractUpdates.forEach(c => + this.log.verbose(`${Operation[operation]} contract instance update at ${c.address.toString()}`), + ); + if (operation == Operation.Store) { + return await this.store.addContractInstanceUpdates(contractUpdates, timestamp); + } else if (operation == Operation.Delete) { + return await this.store.deleteContractInstanceUpdates(contractUpdates, timestamp); + } + } + return true; + } + + /** + * Stores the functions that were broadcasted individually. + * + * @dev Beware that there is not a delete variant of this, since they are added to contract classes + * and will be deleted as part of the class if needed. 
+ */ + private async storeBroadcastedIndividualFunctions( + allLogs: ContractClassLog[], + _blockNum: BlockNumber, + ): Promise { + // Filter out private and utility function broadcast events + const privateFnEvents = allLogs + .filter(log => PrivateFunctionBroadcastedEvent.isPrivateFunctionBroadcastedEvent(log)) + .map(log => PrivateFunctionBroadcastedEvent.fromLog(log)); + const utilityFnEvents = allLogs + .filter(log => UtilityFunctionBroadcastedEvent.isUtilityFunctionBroadcastedEvent(log)) + .map(log => UtilityFunctionBroadcastedEvent.fromLog(log)); + + // Group all events by contract class id + for (const [classIdString, classEvents] of Object.entries( + groupBy([...privateFnEvents, ...utilityFnEvents], e => e.contractClassId.toString()), + )) { + const contractClassId = Fr.fromHexString(classIdString); + const contractClass = await this.store.getContractClass(contractClassId); + if (!contractClass) { + this.log.warn(`Skipping broadcasted functions as contract class ${contractClassId.toString()} was not found`); + continue; + } + + // Split private and utility functions, and filter out invalid ones + const allFns = classEvents.map(e => e.toFunctionWithMembershipProof()); + const privateFns = allFns.filter( + (fn): fn is ExecutablePrivateFunctionWithMembershipProof => 'utilityFunctionsTreeRoot' in fn, + ); + const utilityFns = allFns.filter( + (fn): fn is UtilityFunctionWithMembershipProof => 'privateFunctionsArtifactTreeRoot' in fn, + ); + + const privateFunctionsWithValidity = await Promise.all( + privateFns.map(async fn => ({ fn, valid: await isValidPrivateFunctionMembershipProof(fn, contractClass) })), + ); + const validPrivateFns = privateFunctionsWithValidity.filter(({ valid }) => valid).map(({ fn }) => fn); + const utilityFunctionsWithValidity = await Promise.all( + utilityFns.map(async fn => ({ + fn, + valid: await isValidUtilityFunctionMembershipProof(fn, contractClass), + })), + ); + const validUtilityFns = utilityFunctionsWithValidity.filter(({ valid 
}) => valid).map(({ fn }) => fn); + const validFnCount = validPrivateFns.length + validUtilityFns.length; + if (validFnCount !== allFns.length) { + this.log.warn(`Skipping ${allFns.length - validFnCount} invalid functions`); + } + + // Store the functions in the contract class in a single operation + if (validFnCount > 0) { + this.log.verbose(`Storing ${validFnCount} functions for contract class ${contractClassId.toString()}`); + } + return await this.store.addFunctions(contractClassId, validPrivateFns, validUtilityFns); + } + return true; + } +} diff --git a/yarn-project/archiver/src/archiver/instrumentation.ts b/yarn-project/archiver/src/modules/instrumentation.ts similarity index 100% rename from yarn-project/archiver/src/archiver/instrumentation.ts rename to yarn-project/archiver/src/modules/instrumentation.ts diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/modules/l1_synchronizer.ts similarity index 56% rename from yarn-project/archiver/src/archiver/archiver.ts rename to yarn-project/archiver/src/modules/l1_synchronizer.ts index d908d66a6487..7d07a7091e1c 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/modules/l1_synchronizer.ts @@ -1,159 +1,59 @@ import type { BlobClientInterface } from '@aztec/blob-client/client'; -import { GENESIS_BLOCK_HEADER_HASH, INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; import { EpochCache } from '@aztec/epoch-cache'; -import { createEthereumChain } from '@aztec/ethereum/chain'; -import { BlockTagTooOldError, InboxContract, RollupContract } from '@aztec/ethereum/contracts'; +import { InboxContract, RollupContract } from '@aztec/ethereum/contracts'; import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses'; import type { L1BlockId } from '@aztec/ethereum/l1-types'; import type { ViemPublicClient, ViemPublicDebugClient } from '@aztec/ethereum/types'; import { maxBigint } from '@aztec/foundation/bigint'; -import { BlockNumber, 
CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types'; -import { Buffer16, Buffer32 } from '@aztec/foundation/buffer'; -import { merge, pick } from '@aztec/foundation/collection'; +import { BlockNumber, CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types'; +import { Buffer32 } from '@aztec/foundation/buffer'; +import { pick } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { EthAddress } from '@aztec/foundation/eth-address'; import { type Logger, createLogger } from '@aztec/foundation/log'; -import { type PromiseWithResolvers, promiseWithResolvers } from '@aztec/foundation/promise'; -import { RunningPromise, makeLoggingErrorHandler } from '@aztec/foundation/running-promise'; import { count } from '@aztec/foundation/string'; import { DateProvider, Timer, elapsed } from '@aztec/foundation/timer'; import { isDefined } from '@aztec/foundation/types'; -import { - type ArchiverEmitter, - type CheckpointId, - GENESIS_CHECKPOINT_HEADER_HASH, - L2Block, - L2BlockNew, - type L2BlockSink, - type L2BlockSource, - L2BlockSourceEvents, - type L2Tips, -} from '@aztec/stdlib/block'; -import { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; -import type { ContractDataSource } from '@aztec/stdlib/contract'; -import { - type L1RollupConstants, - getEpochAtSlot, - getEpochNumberAtTimestamp, - getSlotAtTimestamp, - getSlotRangeForEpoch, - getTimestampRangeForEpoch, -} from '@aztec/stdlib/epoch-helpers'; -import type { L2LogsSource } from '@aztec/stdlib/interfaces/server'; -import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; +import { type ArchiverEmitter, L2BlockSourceEvents, type ValidateCheckpointResult } from '@aztec/stdlib/block'; +import { PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; +import { type L1RollupConstants, getEpochAtSlot } from '@aztec/stdlib/epoch-helpers'; import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging'; 
-import type { CheckpointHeader } from '@aztec/stdlib/rollup'; -import type { BlockHeader } from '@aztec/stdlib/tx'; -import { - type TelemetryClient, - type Traceable, - type Tracer, - execInSpan, - getTelemetryClient, - trackSpan, -} from '@aztec/telemetry-client'; - -import { EventEmitter } from 'events'; -import { type Hex, createPublicClient, fallback, http } from 'viem'; - -import { ArchiveSourceBase } from './archive_source_base.js'; -import { - addBlocksWithContractData, - addCheckpointsWithContractData, - unwindCheckpointsWithContractData, -} from './archiver_store_updates.js'; -import type { ArchiverConfig } from './config.js'; -import { InitialCheckpointNumberNotSequentialError, NoBlobBodiesFoundError } from './errors.js'; -import { ArchiverInstrumentation } from './instrumentation.js'; -import type { CheckpointData } from './kv_archiver_store/block_store.js'; -import type { KVArchiverDataStore } from './kv_archiver_store/kv_archiver_store.js'; +import { type Traceable, type Tracer, execInSpan, trackSpan } from '@aztec/telemetry-client'; + +import { InitialCheckpointNumberNotSequentialError } from '../errors.js'; import { retrieveCheckpointsFromRollup, retrieveL1ToL2Message, retrieveL1ToL2Messages, retrievedToPublishedCheckpoint, -} from './l1/data_retrieval.js'; -import { validateAndLogTraceAvailability } from './l1/validate_trace.js'; -import type { InboxMessage } from './structs/inbox_message.js'; -import { type ValidateCheckpointResult, validateCheckpointAttestations } from './validation.js'; - -/** - * Helper interface to combine all sources this archiver implementation provides. - */ -export type ArchiveSource = L2BlockSource & L2LogsSource & ContractDataSource & L1ToL2MessageSource; - -/** Request to add a block to the archiver, queued for processing by the sync loop. 
*/ -type AddBlockRequest = { - block: L2BlockNew; - resolve: () => void; - reject: (err: Error) => void; -}; - -export type ArchiverDeps = { - telemetry?: TelemetryClient; - blobClient: BlobClientInterface; - epochCache?: EpochCache; - dateProvider?: DateProvider; -}; - -function mapArchiverConfig(config: Partial) { - return { - pollingIntervalMs: config.archiverPollingIntervalMS, - batchSize: config.archiverBatchSize, - skipValidateCheckpointAttestations: config.skipValidateCheckpointAttestations, - maxAllowedEthClientDriftSeconds: config.maxAllowedEthClientDriftSeconds, - ethereumAllowNoDebugHosts: config.ethereumAllowNoDebugHosts, - }; -} +} from '../l1/data_retrieval.js'; +import type { KVArchiverDataStore } from '../store/kv_archiver_store.js'; +import type { InboxMessage } from '../structs/inbox_message.js'; +import { ArchiverDataStoreUpdater } from './data_store_updater.js'; +import type { ArchiverInstrumentation } from './instrumentation.js'; +import { validateCheckpointAttestations } from './validation.js'; type RollupStatus = { provenCheckpointNumber: CheckpointNumber; - provenArchive: Hex; + provenArchive: string; pendingCheckpointNumber: CheckpointNumber; - pendingArchive: Hex; + pendingArchive: string; validationResult: ValidateCheckpointResult | undefined; lastRetrievedCheckpoint?: PublishedCheckpoint; lastL1BlockWithCheckpoint?: bigint; }; /** - * Pulls checkpoints in a non-blocking manner and provides interface for their retrieval. - * Responsible for handling robust L1 polling so that other components do not need to - * concern themselves with it. + * Handles L1 synchronization for the archiver. + * Responsible for fetching checkpoints, L1→L2 messages, and handling L1 reorgs. */ -export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceable { - /** Event emitter for archiver events (L2BlockProven, L2PruneDetected, etc). 
*/ - public readonly events: ArchiverEmitter = new EventEmitter() as ArchiverEmitter; - - /** A loop in which we will be continually fetching new checkpoints. */ - private runningPromise: RunningPromise; - +export class ArchiverL1Synchronizer implements Traceable { private l1BlockNumber: bigint | undefined; private l1Timestamp: bigint | undefined; - private initialSyncComplete: boolean = false; - private initialSyncPromise: PromiseWithResolvers; - - /** Queue of blocks to be added to the store, processed by the sync loop. */ - private blockQueue: AddBlockRequest[] = []; + private readonly updater: ArchiverDataStoreUpdater; public readonly tracer: Tracer; - /** - * Creates a new instance of the Archiver. - * @param publicClient - A client for interacting with the Ethereum node. - * @param debugClient - A client for interacting with the Ethereum node for debug/trace methods. - * @param rollup - Rollup contract instance. - * @param inbox - Inbox contract instance. - * @param l1Addresses - L1 contract addresses (registry, governance proposer, slash factory, slashing proposer). - * @param dataStore - An archiver data store for storage & retrieval of blocks, encrypted logs & contract data. - * @param config - Archiver configuration options. - * @param blobClient - Client for retrieving blob data. - * @param epochCache - Cache for epoch-related data. - * @param dateProvider - Provider for current date/time. - * @param instrumentation - Instrumentation for metrics and tracing. - * @param l1constants - L1 rollup constants. - * @param log - A logger. 
- */ constructor( private readonly publicClient: ViemPublicClient, private readonly debugClient: ViemPublicDebugClient, @@ -163,213 +63,37 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl L1ContractAddresses, 'registryAddress' | 'governanceProposerAddress' | 'slashFactoryAddress' > & { slashingProposerAddress: EthAddress }, - readonly dataStore: KVArchiverDataStore, - private config: { - pollingIntervalMs: number; + private readonly store: KVArchiverDataStore, + private readonly config: { batchSize: number; skipValidateCheckpointAttestations?: boolean; maxAllowedEthClientDriftSeconds: number; - ethereumAllowNoDebugHosts?: boolean; }, private readonly blobClient: BlobClientInterface, private readonly epochCache: EpochCache, private readonly dateProvider: DateProvider, private readonly instrumentation: ArchiverInstrumentation, private readonly l1constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr }, - private readonly log: Logger = createLogger('archiver'), + private readonly events: ArchiverEmitter, + tracer: Tracer, + private readonly log: Logger = createLogger('archiver:l1-sync'), ) { - super(dataStore); - - this.tracer = instrumentation.tracer; - this.initialSyncPromise = promiseWithResolvers(); - - // Running promise starts with a small interval inbetween runs, so all iterations needed for the initial sync - // are done as fast as possible. This then gets updated once the initial sync completes. - this.runningPromise = new RunningPromise( - () => this.sync(), - this.log, - this.config.pollingIntervalMs / 10, - makeLoggingErrorHandler(this.log, NoBlobBodiesFoundError, BlockTagTooOldError), - ); + this.updater = new ArchiverDataStoreUpdater(this.store); + this.tracer = tracer; } - /** - * Creates a new instance of the Archiver and blocks until it syncs from chain. - * @param config - The archiver's desired configuration. - * @param archiverStore - The backing store for the archiver. 
- * @param blockUntilSynced - If true, blocks until the archiver has fully synced. - * @returns - An instance of the archiver. - */ - public static async createAndSync( - config: ArchiverConfig, - archiverStore: KVArchiverDataStore, - deps: ArchiverDeps, - blockUntilSynced = true, - ): Promise { - const chain = createEthereumChain(config.l1RpcUrls, config.l1ChainId); - const publicClient = createPublicClient({ - chain: chain.chainInfo, - transport: fallback(config.l1RpcUrls.map(url => http(url, { batch: false }))), - pollingInterval: config.viemPollingIntervalMS, - }); - - // Create debug client using debug RPC URLs if available, otherwise fall back to regular RPC URLs - const debugRpcUrls = config.l1DebugRpcUrls.length > 0 ? config.l1DebugRpcUrls : config.l1RpcUrls; - const debugClient = createPublicClient({ - chain: chain.chainInfo, - transport: fallback(debugRpcUrls.map(url => http(url, { batch: false }))), - pollingInterval: config.viemPollingIntervalMS, - }) as ViemPublicDebugClient; - - const rollup = new RollupContract(publicClient, config.l1Contracts.rollupAddress); - const inbox = new InboxContract(publicClient, config.l1Contracts.inboxAddress); - - const [l1StartBlock, l1GenesisTime, proofSubmissionEpochs, genesisArchiveRoot, slashingProposerAddress] = - await Promise.all([ - rollup.getL1StartBlock(), - rollup.getL1GenesisTime(), - rollup.getProofSubmissionEpochs(), - rollup.getGenesisArchiveTreeRoot(), - rollup.getSlashingProposerAddress(), - ] as const); - - const l1StartBlockHash = await publicClient - .getBlock({ blockNumber: l1StartBlock, includeTransactions: false }) - .then(block => Buffer32.fromString(block.hash)); - - const { aztecEpochDuration: epochDuration, aztecSlotDuration: slotDuration, ethereumSlotDuration } = config; - - const l1Constants = { - l1StartBlockHash, - l1StartBlock, - l1GenesisTime, - epochDuration, - slotDuration, - ethereumSlotDuration, - proofSubmissionEpochs: Number(proofSubmissionEpochs), - genesisArchiveRoot: 
Fr.fromString(genesisArchiveRoot.toString()), - }; - - const opts = merge( - { - pollingIntervalMs: 10_000, - batchSize: 100, - maxAllowedEthClientDriftSeconds: 300, - ethereumAllowNoDebugHosts: false, - }, - mapArchiverConfig(config), - ); - - const epochCache = deps.epochCache ?? (await EpochCache.create(config.l1Contracts.rollupAddress, config, deps)); - const telemetry = deps.telemetry ?? getTelemetryClient(); - - const archiver = new Archiver( - publicClient, - debugClient, - rollup, - inbox, - { ...config.l1Contracts, slashingProposerAddress }, - archiverStore, - opts, - deps.blobClient, - epochCache, - deps.dateProvider ?? new DateProvider(), - await ArchiverInstrumentation.new(telemetry, () => archiverStore.estimateSize()), - l1Constants, - ); - await archiver.start(blockUntilSynced); - return archiver; - } - - /** Updates archiver config */ - public updateConfig(newConfig: Partial) { - this.config = merge(this.config, mapArchiverConfig(newConfig)); - } - - /** - * Starts sync process. - * @param blockUntilSynced - If true, blocks until the archiver has fully synced. - */ - public async start(blockUntilSynced: boolean): Promise { - if (this.runningPromise.isRunning()) { - throw new Error('Archiver is already running'); - } - - await this.blobClient.testSources(); - await this.testEthereumNodeSynced(); - await validateAndLogTraceAvailability(this.debugClient, this.config.ethereumAllowNoDebugHosts ?? 
false); - - // Log initial state for the archiver - const { l1StartBlock } = this.l1constants; - const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = l1StartBlock } = await this.store.getSynchPoint(); - const currentL2Checkpoint = await this.getSynchedCheckpointNumber(); - this.log.info( - `Starting archiver sync to rollup contract ${this.rollup.address} from L1 block ${blocksSynchedTo} and L2 checkpoint ${currentL2Checkpoint}`, - { blocksSynchedTo, messagesSynchedTo, currentL2Checkpoint }, - ); - - // Start sync loop, and return the wait for initial sync if we are asked to block until synced - this.runningPromise.start(); - if (blockUntilSynced) { - return this.waitForInitialSync(); - } - } - - public syncImmediate() { - return this.runningPromise.trigger(); - } - - /** - * Queues a block to be added to the archiver store and triggers processing. - * The block will be processed by the sync loop. - * Implements the L2BlockSink interface. - * @param block - The L2 block to add. - * @returns A promise that resolves when the block has been added to the store, or rejects on error. - */ - public addBlock(block: L2BlockNew): Promise { - return new Promise((resolve, reject) => { - this.blockQueue.push({ block, resolve, reject }); - this.log.debug(`Queued block ${block.number} for processing`); - // Trigger an immediate sync, but don't wait for it - the promise resolves when the block is processed - this.syncImmediate().catch(err => { - this.log.error(`Sync immediate call failed: ${err}`); - }); - }); - } - - /** - * Processes all queued blocks, adding them to the store. - * Called at the beginning of each sync iteration. - * Blocks are processed in the order they were queued. 
- */ - private async processQueuedBlocks(): Promise { - if (this.blockQueue.length === 0) { - return; - } - - // Take all blocks from the queue - const queuedItems = this.blockQueue.splice(0, this.blockQueue.length); - this.log.debug(`Processing ${queuedItems.length} queued block(s)`); - - // Process each block individually to properly resolve/reject each promise - for (const { block, resolve, reject } of queuedItems) { - try { - await addBlocksWithContractData(this.store, [block]); - this.log.debug(`Added block ${block.number} to store`); - resolve(); - } catch (err: any) { - this.log.error(`Failed to add block ${block.number} to store: ${err.message}`); - reject(err); - } - } + /** Returns the last L1 block number that was synced. */ + public getL1BlockNumber(): bigint | undefined { + return this.l1BlockNumber; } - public waitForInitialSync() { - return this.initialSyncPromise.promise; + /** Returns the last L1 timestamp that was synced. */ + public getL1Timestamp(): bigint | undefined { + return this.l1Timestamp; } /** Checks that the ethereum node we are connected to has a latest timestamp no more than the allowed drift. Throw if not. */ - private async testEthereumNodeSynced() { + public async testEthereumNodeSynced(): Promise { const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds; if (maxAllowedDelay === 0) { return; @@ -384,7 +108,7 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl } @trackSpan('Archiver.syncFromL1') - private async syncFromL1() { + public async syncFromL1(initialSyncComplete: boolean): Promise { /** * We keep track of three "pointers" to L1 blocks: * 1. the last L1 block that published an L2 block @@ -454,7 +178,7 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl if (currentL1BlockNumber > blocksSynchedTo) { // First we retrieve new checkpoints and L2 blocks and store them in the DB. 
This will also update the // pending chain validation status, proven checkpoint number, and synched L1 block number. - const rollupStatus = await this.handleCheckpoints(blocksSynchedTo, currentL1BlockNumber); + const rollupStatus = await this.handleCheckpoints(blocksSynchedTo, currentL1BlockNumber, initialSyncComplete); // Then we prune the current epoch if it'd reorg on next submission. // Note that we don't do this before retrieving checkpoints because we may need to retrieve // checkpoints from more than 2 epochs ago, so we want to make sure we have the latest view of @@ -496,34 +220,10 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl l1TimestampAtStart: currentL1Timestamp, l1BlockNumberAtEnd, }); - - // We resolve the initial sync only once we've caught up with the latest L1 block number (with 1 block grace) - // so if the initial sync took too long, we still go for another iteration. - if (!this.initialSyncComplete && currentL1BlockNumber + 1n >= l1BlockNumberAtEnd) { - this.log.info(`Initial archiver sync to L1 block ${currentL1BlockNumber} complete`, { - l1BlockNumber: currentL1BlockNumber, - syncPoint: await this.store.getSynchPoint(), - ...(await this.getL2Tips()), - }); - this.runningPromise.setPollingIntervalMS(this.config.pollingIntervalMs); - this.initialSyncComplete = true; - this.initialSyncPromise.resolve(); - } - } - - /** - * Fetches logs from L1 contracts and processes them. - */ - @trackSpan('Archiver.sync') - private async sync() { - // Process any queued blocks first, before doing L1 sync - await this.processQueuedBlocks(); - // Now perform L1 sync - await this.syncFromL1(); } /** Queries the rollup contract on whether a prune can be executed on the immediate next L1 block. */ - private async canPrune(currentL1BlockNumber: bigint, currentL1Timestamp: bigint) { + private async canPrune(currentL1BlockNumber: bigint, currentL1Timestamp: bigint): Promise { const time = (currentL1Timestamp ?? 
0n) + BigInt(this.l1constants.ethereumSlotDuration); const result = await this.rollup.canPruneAtTime(time, { blockNumber: currentL1BlockNumber }); if (result) { @@ -542,9 +242,9 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl provenCheckpointNumber: CheckpointNumber, currentL1BlockNumber: bigint, currentL1Timestamp: bigint, - ) { + ): Promise<{ rollupCanPrune: boolean }> { const rollupCanPrune = await this.canPrune(currentL1BlockNumber, currentL1Timestamp); - const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber(); + const localPendingCheckpointNumber = await this.store.getSynchedCheckpointNumber(); const canPrune = localPendingCheckpointNumber > provenCheckpointNumber && rollupCanPrune; if (canPrune) { @@ -583,11 +283,11 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl this.log.debug( `L2 prune from ${provenCheckpointNumber + 1} to ${localPendingCheckpointNumber} will occur on next checkpoint submission.`, ); - await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind); + await this.updater.unwindCheckpointsWithContractData(localPendingCheckpointNumber, checkpointsToUnwind); this.log.warn( `Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` + `to ${provenCheckpointNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` + - `Updated latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`, + `Updated latest checkpoint is ${await this.store.getSynchedCheckpointNumber()}.`, ); this.instrumentation.processPrune(timer.ms()); // TODO(palla/reorg): Do we need to set the block synched L1 block number here? 
@@ -613,7 +313,7 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl messagesSyncPoint: L1BlockId, currentL1BlockNumber: bigint, _currentL1BlockHash: Buffer32, - ) { + ): Promise { this.log.trace(`Handling L1 to L2 messages from ${messagesSyncPoint.l1BlockNumber} to ${currentL1BlockNumber}.`); if (currentL1BlockNumber <= messagesSyncPoint.l1BlockNumber) { return; @@ -633,7 +333,7 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl // Compare message count and rolling hash. If they match, no need to retrieve anything. if ( remoteMessagesState.totalMessagesInserted === localMessagesInserted && - remoteMessagesState.messagesRollingHash.equals(localLastMessage?.rollingHash ?? Buffer16.ZERO) + remoteMessagesState.messagesRollingHash.equals(localLastMessage?.rollingHash ?? Buffer32.ZERO) ) { this.log.trace( `No L1 to L2 messages to query between L1 blocks ${messagesSyncPoint.l1BlockNumber} and ${currentL1BlockNumber}.`, @@ -721,7 +421,10 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl return undefined; } - private async rollbackL1ToL2Messages(localLastMessage: InboxMessage, messagesSyncPoint: L1BlockId) { + private async rollbackL1ToL2Messages( + localLastMessage: InboxMessage, + messagesSyncPoint: L1BlockId, + ): Promise { // Slowly go back through our messages until we find the last common message. // We could query the logs in batch as an optimization, but the depth of the reorg should not be deep, and this // is a very rare case, so it's fine to query one log at a time. 
@@ -768,8 +471,12 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl } @trackSpan('Archiver.handleCheckpoints') - private async handleCheckpoints(blocksSynchedTo: bigint, currentL1BlockNumber: bigint): Promise { - const localPendingCheckpointNumber = await this.getSynchedCheckpointNumber(); + private async handleCheckpoints( + blocksSynchedTo: bigint, + currentL1BlockNumber: bigint, + initialSyncComplete: boolean, + ): Promise { + const localPendingCheckpointNumber = await this.store.getSynchedCheckpointNumber(); const initialValidationResult: ValidateCheckpointResult | undefined = await this.store.getPendingChainValidationStatus(); const { @@ -801,9 +508,9 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl // we need to set it to zero. This is an edge case because we dont have a checkpoint zero (initial checkpoint is one), // so localCheckpointForDestinationProvenCheckpointNumber would not be found below. if (provenCheckpointNumber === 0) { - const localProvenCheckpointNumber = await this.getProvenCheckpointNumber(); + const localProvenCheckpointNumber = await this.store.getProvenCheckpointNumber(); if (localProvenCheckpointNumber !== provenCheckpointNumber) { - await this.setProvenCheckpointNumber(provenCheckpointNumber); + await this.store.setProvenCheckpointNumber(provenCheckpointNumber); this.log.info(`Rolled back proven chain to checkpoint ${provenCheckpointNumber}`, { provenCheckpointNumber }); } } @@ -813,7 +520,7 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl // Sanity check. I've hit what seems to be a state where the proven checkpoint is set to a value greater than the latest // synched checkpoint when requesting L2Tips from the archiver. This is the only place where the proven checkpoint is set. 
- const synched = await this.getSynchedCheckpointNumber(); + const synched = await this.store.getSynchedCheckpointNumber(); if ( localCheckpointForDestinationProvenCheckpointNumber && synched < localCheckpointForDestinationProvenCheckpointNumber.checkpointNumber @@ -833,9 +540,9 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl localCheckpointForDestinationProvenCheckpointNumber && provenArchive.equals(localCheckpointForDestinationProvenCheckpointNumber.archive.root) ) { - const localProvenCheckpointNumber = await this.getProvenCheckpointNumber(); + const localProvenCheckpointNumber = await this.store.getProvenCheckpointNumber(); if (localProvenCheckpointNumber !== provenCheckpointNumber) { - await this.setProvenCheckpointNumber(provenCheckpointNumber); + await this.store.setProvenCheckpointNumber(provenCheckpointNumber); this.log.info(`Updated proven chain to checkpoint ${provenCheckpointNumber}`, { provenCheckpointNumber }); const provenSlotNumber = localCheckpointForDestinationProvenCheckpointNumber.header.slotNumber; const provenEpochNumber: EpochNumber = getEpochAtSlot(provenSlotNumber, this.l1constants); @@ -928,12 +635,12 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl } const checkpointsToUnwind = localPendingCheckpointNumber - tipAfterUnwind; - await this.unwindCheckpoints(localPendingCheckpointNumber, checkpointsToUnwind); + await this.updater.unwindCheckpointsWithContractData(localPendingCheckpointNumber, checkpointsToUnwind); this.log.warn( `Unwound ${count(checkpointsToUnwind, 'checkpoint')} from checkpoint ${localPendingCheckpointNumber} ` + `due to mismatched checkpoint hashes at L1 block ${currentL1BlockNumber}. 
` + - `Updated L2 latest checkpoint is ${await this.getSynchedCheckpointNumber()}.`, + `Updated L2 latest checkpoint is ${await this.store.getSynchedCheckpointNumber()}.`, ); } } @@ -962,7 +669,7 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl this.l1Addresses, this.instrumentation, this.log, - !this.initialSyncComplete, // isHistoricalSync + !initialSyncComplete, // isHistoricalSync ), ); @@ -1026,7 +733,7 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl // Check the inHash of the checkpoint against the l1->l2 messages. // The messages should've been synced up to the currentL1BlockNumber and must be available for the published // checkpoints we just retrieved. - const l1ToL2Messages = await this.getL1ToL2Messages(published.checkpoint.number); + const l1ToL2Messages = await this.store.getL1ToL2Messages(published.checkpoint.number); const computedInHash = computeInHashFromL1ToL2Messages(l1ToL2Messages); const publishedInHash = published.checkpoint.header.inHash; if (!computedInHash.equals(publishedInHash)) { @@ -1059,7 +766,7 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl rollupStatus.validationResult === initialValidationResult ? 
undefined : rollupStatus.validationResult; const [processDuration] = await elapsed(() => execInSpan(this.tracer, 'Archiver.addCheckpoints', () => - this.addCheckpoints(validCheckpoints, updatedValidationResult), + this.updater.addCheckpointsWithContractData(validCheckpoints, updatedValidationResult), ), ); this.instrumentation.processNewBlocks( @@ -1111,12 +818,12 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl status: RollupStatus, blocksSynchedTo: bigint, currentL1BlockNumber: bigint, - ) { + ): Promise { const { lastRetrievedCheckpoint, pendingCheckpointNumber } = status; // Compare the last checkpoint we have (either retrieved in this round or loaded from store) with what the // rollup contract told us was the latest one (pinned at the currentL1BlockNumber). const latestLocalCheckpointNumber = - lastRetrievedCheckpoint?.checkpoint.number ?? (await this.getSynchedCheckpointNumber()); + lastRetrievedCheckpoint?.checkpoint.number ?? (await this.store.getSynchedCheckpointNumber()); if (latestLocalCheckpointNumber < pendingCheckpointNumber) { // Here we have consumed all logs until the `currentL1Block` we pinned at the beginning of the archiver loop, // but still haven't reached the pending checkpoint according to the call to the rollup contract. @@ -1153,367 +860,11 @@ export class Archiver extends ArchiveSourceBase implements L2BlockSink, Traceabl } } - /** Resumes the archiver after a stop. */ - public resume() { - if (this.runningPromise.isRunning()) { - this.log.warn(`Archiver already running`); - } - this.log.info(`Restarting archiver`); - this.runningPromise.start(); - } - - /** - * Stops the archiver. - * @returns A promise signalling completion of the stop process. 
- */ - public async stop(): Promise { - this.log.debug('Stopping...'); - await this.runningPromise.stop(); - - this.log.info('Stopped.'); - return Promise.resolve(); - } - - public backupTo(destPath: string): Promise { - return this.dataStore.backupTo(destPath); - } - - public getL1Constants(): Promise { - return Promise.resolve(this.l1constants); - } - - public getGenesisValues(): Promise<{ genesisArchiveRoot: Fr }> { - return Promise.resolve({ genesisArchiveRoot: this.l1constants.genesisArchiveRoot }); - } - - public getRollupAddress(): Promise { - return Promise.resolve(EthAddress.fromString(this.rollup.address)); - } - - public getRegistryAddress(): Promise { - return Promise.resolve(this.l1Addresses.registryAddress); - } - - public getL1BlockNumber(): bigint | undefined { - return this.l1BlockNumber; - } - - public getL1Timestamp(): Promise { - return Promise.resolve(this.l1Timestamp); - } - - public getL2SlotNumber(): Promise { - return Promise.resolve( - this.l1Timestamp === undefined ? undefined : getSlotAtTimestamp(this.l1Timestamp, this.l1constants), - ); - } - - public getL2EpochNumber(): Promise { - return Promise.resolve( - this.l1Timestamp === undefined ? undefined : getEpochNumberAtTimestamp(this.l1Timestamp, this.l1constants), - ); - } - - public async getBlocksForEpoch(epochNumber: EpochNumber): Promise { - const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants); - const blocks: L2Block[] = []; - - // Walk the list of checkpoints backwards and filter by slots matching the requested epoch. - // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here. 
- let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber()); - const slot = (b: CheckpointData) => b.header.slotNumber; - while (checkpoint && slot(checkpoint) >= start) { - if (slot(checkpoint) <= end) { - // push the blocks on backwards - const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1; - for (let i = endBlock; i >= checkpoint.startBlock; i--) { - const block = await this.getBlock(BlockNumber(i)); - if (block) { - blocks.push(block); - } - } - } - checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1)); - } - - return blocks.reverse(); - } - - public async getBlockHeadersForEpoch(epochNumber: EpochNumber): Promise { - const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants); - const blocks: BlockHeader[] = []; - - // Walk the list of checkpoints backwards and filter by slots matching the requested epoch. - // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here. - let checkpoint = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber()); - const slot = (b: CheckpointData) => b.header.slotNumber; - while (checkpoint && slot(checkpoint) >= start) { - if (slot(checkpoint) <= end) { - // push the blocks on backwards - const endBlock = checkpoint.startBlock + checkpoint.numBlocks - 1; - for (let i = endBlock; i >= checkpoint.startBlock; i--) { - const block = await this.getBlockHeader(BlockNumber(i)); - if (block) { - blocks.push(block); - } - } - } - checkpoint = await this.store.getCheckpointData(CheckpointNumber(checkpoint.checkpointNumber - 1)); - } - return blocks.reverse(); - } - - public async isEpochComplete(epochNumber: EpochNumber): Promise { - // The epoch is complete if the current L2 block is the last one in the epoch (or later) - const header = await this.getBlockHeader('latest'); - const slot = header ? 
header.globalVariables.slotNumber : undefined; - const [_startSlot, endSlot] = getSlotRangeForEpoch(epochNumber, this.l1constants); - if (slot && slot >= endSlot) { - return true; - } - - // If we haven't run an initial sync, just return false. - const l1Timestamp = this.l1Timestamp; - if (l1Timestamp === undefined) { - return false; - } - - // If not, the epoch may also be complete if the L2 slot has passed without a block - // We compute this based on the end timestamp for the given epoch and the timestamp of the last L1 block - const [_startTimestamp, endTimestamp] = getTimestampRangeForEpoch(epochNumber, this.l1constants); - - // For this computation, we throw in a few extra seconds just for good measure, - // since we know the next L1 block won't be mined within this range. Remember that - // l1timestamp is the timestamp of the last l1 block we've seen, so this relies on - // the fact that L1 won't mine two blocks within this time of each other. - // TODO(palla/reorg): Is the above a safe assumption? - const leeway = 1n; - return l1Timestamp + leeway >= endTimestamp; - } - - /** Returns whether the archiver has completed an initial sync run successfully. 
*/ - public isInitialSyncComplete(): boolean { - return this.initialSyncComplete; - } - - public async getCheckpointHeader(number: CheckpointNumber | 'latest'): Promise { - if (number === 'latest') { - number = await this.getSynchedCheckpointNumber(); - } - if (number === 0) { - return undefined; - } + private async getCheckpointHeader(number: CheckpointNumber) { const checkpoint = await this.store.getCheckpointData(number); if (!checkpoint) { return undefined; } return checkpoint.header; } - - public getCheckpointNumber(): Promise { - return this.getSynchedCheckpointNumber(); - } - - public getSynchedCheckpointNumber(): Promise { - return this.store.getSynchedCheckpointNumber(); - } - - public getProvenCheckpointNumber(): Promise { - return this.store.getProvenCheckpointNumber(); - } - - public setProvenCheckpointNumber(checkpointNumber: CheckpointNumber): Promise { - return this.store.setProvenCheckpointNumber(checkpointNumber); - } - - public unwindCheckpoints(from: CheckpointNumber, checkpointsToUnwind: number): Promise { - return unwindCheckpointsWithContractData(this.store, from, checkpointsToUnwind); - } - - public async getLastBlockNumberInCheckpoint(checkpointNumber: CheckpointNumber): Promise { - const checkpointData = await this.store.getCheckpointData(checkpointNumber); - if (!checkpointData) { - return undefined; - } - return BlockNumber(checkpointData.startBlock + checkpointData.numBlocks - 1); - } - - public addCheckpoints( - checkpoints: PublishedCheckpoint[], - pendingChainValidationStatus?: ValidateCheckpointResult, - ): Promise { - return addCheckpointsWithContractData(this.store, checkpoints, pendingChainValidationStatus); - } - - getCheckpointedBlockNumber(): Promise { - return this.store.getCheckpointedL2BlockNumber(); - } - - public async getL2Tips(): Promise { - const [latestBlockNumber, provenBlockNumber, checkpointedBlockNumber] = await Promise.all([ - this.getBlockNumber(), - this.getProvenBlockNumber(), - 
this.getCheckpointedBlockNumber(), - ] as const); - - // TODO(#13569): Compute proper finalized block number based on L1 finalized block. - // We just force it 2 epochs worth of proven data for now. - // NOTE: update end-to-end/src/e2e_epochs/epochs_empty_blocks.test.ts as that uses finalized blocks in computations - const finalizedBlockNumber = BlockNumber(Math.max(provenBlockNumber - this.l1constants.epochDuration * 2, 0)); - - const beforeInitialblockNumber = BlockNumber(INITIAL_L2_BLOCK_NUM - 1); - - // Get the latest block header and checkpointed blocks for proven, finalised and checkpointed blocks - const [latestBlockHeader, provenCheckpointedBlock, finalizedCheckpointedBlock, checkpointedBlock] = - await Promise.all([ - latestBlockNumber > beforeInitialblockNumber ? this.getBlockHeader(latestBlockNumber) : undefined, - provenBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(provenBlockNumber) : undefined, - finalizedBlockNumber > beforeInitialblockNumber ? this.getCheckpointedBlock(finalizedBlockNumber) : undefined, - checkpointedBlockNumber > beforeInitialblockNumber - ? this.getCheckpointedBlock(checkpointedBlockNumber) - : undefined, - ] as const); - - if (latestBlockNumber > beforeInitialblockNumber && !latestBlockHeader) { - throw new Error(`Failed to retrieve latest block header for block ${latestBlockNumber}`); - } - - // Checkpointed blocks must exist for proven, finalized and checkpointed tips if they are beyond the initial block number. 
- if (checkpointedBlockNumber > beforeInitialblockNumber && !checkpointedBlock?.block.header) { - throw new Error( - `Failed to retrieve checkpointed block header for block ${checkpointedBlockNumber} (latest block is ${latestBlockNumber})`, - ); - } - - if (provenBlockNumber > beforeInitialblockNumber && !provenCheckpointedBlock?.block.header) { - throw new Error( - `Failed to retrieve proven checkpointed for block ${provenBlockNumber} (latest block is ${latestBlockNumber})`, - ); - } - - if (finalizedBlockNumber > beforeInitialblockNumber && !finalizedCheckpointedBlock?.block.header) { - throw new Error( - `Failed to retrieve finalized block header for block ${finalizedBlockNumber} (latest block is ${latestBlockNumber})`, - ); - } - - const latestBlockHeaderHash = (await latestBlockHeader?.hash()) ?? GENESIS_BLOCK_HEADER_HASH; - const provenBlockHeaderHash = (await provenCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH; - const finalizedBlockHeaderHash = - (await finalizedCheckpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH; - const checkpointedBlockHeaderHash = (await checkpointedBlock?.block.header?.hash()) ?? GENESIS_BLOCK_HEADER_HASH; - - // Now attempt to retrieve checkpoints for proven, finalised and checkpointed blocks - const [[provenBlockCheckpoint], [finalizedBlockCheckpoint], [checkpointedBlockCheckpoint]] = await Promise.all([ - provenCheckpointedBlock !== undefined - ? await this.getPublishedCheckpoints(provenCheckpointedBlock?.checkpointNumber, 1) - : [undefined], - finalizedCheckpointedBlock !== undefined - ? await this.getPublishedCheckpoints(finalizedCheckpointedBlock?.checkpointNumber, 1) - : [undefined], - checkpointedBlock !== undefined - ? 
await this.getPublishedCheckpoints(checkpointedBlock?.checkpointNumber, 1) - : [undefined], - ]); - - const initialcheckpointId: CheckpointId = { - number: CheckpointNumber.ZERO, - hash: GENESIS_CHECKPOINT_HEADER_HASH.toString(), - }; - - const makeCheckpointId = (checkpoint: PublishedCheckpoint | undefined) => { - if (checkpoint === undefined) { - return initialcheckpointId; - } - return { - number: checkpoint.checkpoint.number, - hash: checkpoint.checkpoint.hash().toString(), - }; - }; - - const l2Tips: L2Tips = { - proposed: { - number: latestBlockNumber, - hash: latestBlockHeaderHash.toString(), - }, - proven: { - block: { - number: provenBlockNumber, - hash: provenBlockHeaderHash.toString(), - }, - checkpoint: makeCheckpointId(provenBlockCheckpoint), - }, - finalized: { - block: { - number: finalizedBlockNumber, - hash: finalizedBlockHeaderHash.toString(), - }, - checkpoint: makeCheckpointId(finalizedBlockCheckpoint), - }, - checkpointed: { - block: { - number: checkpointedBlockNumber, - hash: checkpointedBlockHeaderHash.toString(), - }, - checkpoint: makeCheckpointId(checkpointedBlockCheckpoint), - }, - }; - - return l2Tips; - } - - public async rollbackTo(targetL2BlockNumber: BlockNumber): Promise { - // TODO(pw/mbps): This still assumes 1 block per checkpoint - const currentBlocks = await this.getL2Tips(); - const currentL2Block = currentBlocks.proposed.number; - const currentProvenBlock = currentBlocks.proven.block.number; - - if (targetL2BlockNumber >= currentL2Block) { - throw new Error(`Target L2 block ${targetL2BlockNumber} must be less than current L2 block ${currentL2Block}`); - } - const blocksToUnwind = currentL2Block - targetL2BlockNumber; - const targetL2Block = await this.store.getCheckpointedBlock(targetL2BlockNumber); - if (!targetL2Block) { - throw new Error(`Target L2 block ${targetL2BlockNumber} not found`); - } - const targetL1BlockNumber = targetL2Block.l1.blockNumber; - const targetCheckpointNumber = 
CheckpointNumber.fromBlockNumber(targetL2BlockNumber); - const targetL1BlockHash = await this.getL1BlockHash(targetL1BlockNumber); - this.log.info(`Unwinding ${blocksToUnwind} checkpoints from L2 block ${currentL2Block}`); - await unwindCheckpointsWithContractData(this.store, CheckpointNumber(currentL2Block), blocksToUnwind); - this.log.info(`Unwinding L1 to L2 messages to checkpoint ${targetCheckpointNumber}`); - await this.store.rollbackL1ToL2MessagesToCheckpoint(targetCheckpointNumber); - this.log.info(`Setting L1 syncpoints to ${targetL1BlockNumber}`); - await this.store.setCheckpointSynchedL1BlockNumber(targetL1BlockNumber); - await this.store.setMessageSynchedL1Block({ l1BlockNumber: targetL1BlockNumber, l1BlockHash: targetL1BlockHash }); - if (targetL2BlockNumber < currentProvenBlock) { - this.log.info(`Clearing proven L2 block number`); - await this.store.setProvenCheckpointNumber(CheckpointNumber.ZERO); - } - // TODO(palla/reorg): Set the finalized block when we add support for it. - // if (targetL2BlockNumber < currentFinalizedBlock) { - // this.log.info(`Clearing finalized L2 block number`); - // await this.store.setFinalizedL2BlockNumber(0); - // } - } - - public async getCheckpointsForEpoch(epochNumber: EpochNumber): Promise { - const [start, end] = getSlotRangeForEpoch(epochNumber, this.l1constants); - const checkpoints: Checkpoint[] = []; - - // Walk the list of checkpoints backwards and filter by slots matching the requested epoch. - // We'll typically ask for checkpoints for a very recent epoch, so we shouldn't need an index here. 
- let checkpointData = await this.store.getCheckpointData(await this.store.getSynchedCheckpointNumber()); - const slot = (b: CheckpointData) => b.header.slotNumber; - while (checkpointData && slot(checkpointData) >= start) { - if (slot(checkpointData) <= end) { - // push the checkpoints on backwards - const [checkpoint] = await this.getPublishedCheckpoints(checkpointData.checkpointNumber, 1); - checkpoints.push(checkpoint.checkpoint); - } - checkpointData = await this.store.getCheckpointData(CheckpointNumber(checkpointData.checkpointNumber - 1)); - } - - return checkpoints.reverse(); - } } diff --git a/yarn-project/archiver/src/archiver/validation.test.ts b/yarn-project/archiver/src/modules/validation.test.ts similarity index 94% rename from yarn-project/archiver/src/archiver/validation.test.ts rename to yarn-project/archiver/src/modules/validation.test.ts index b6e808b31edb..306488244474 100644 --- a/yarn-project/archiver/src/archiver/validation.test.ts +++ b/yarn-project/archiver/src/modules/validation.test.ts @@ -6,13 +6,12 @@ import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; import { Signature } from '@aztec/foundation/eth-signature'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { CommitteeAttestation, EthAddress } from '@aztec/stdlib/block'; -import { Checkpoint, L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; -import { orderAttestations } from '@aztec/stdlib/p2p'; -import { makeCheckpointAttestationFromCheckpoint } from '@aztec/stdlib/testing'; +import { Checkpoint } from '@aztec/stdlib/checkpoint'; import { type MockProxy, mock } from 'jest-mock-extended'; import assert from 'node:assert'; +import { makeSignedPublishedCheckpoint } from '../test/mock_structs.js'; import { getAttestationInfoFromPublishedCheckpoint, validateCheckpointAttestations } from './validation.js'; describe('validateCheckpointAttestations', () => { @@ -25,9 +24,7 @@ describe('validateCheckpointAttestations', 
() => { const makeCheckpoint = async (signers: Secp256k1Signer[], committee: EthAddress[], slot?: number) => { const checkpoint = await Checkpoint.random(CheckpointNumber(1), { slotNumber: SlotNumber(slot ?? 1) }); - const attestations = signers.map(signer => makeCheckpointAttestationFromCheckpoint(checkpoint, signer)); - const committeeAttestations = orderAttestations(attestations, committee); - return new PublishedCheckpoint(checkpoint, L1PublishedData.random(), committeeAttestations); + return makeSignedPublishedCheckpoint(checkpoint, signers, committee); }; const setCommittee = (committee: EthAddress[]) => { diff --git a/yarn-project/archiver/src/archiver/validation.ts b/yarn-project/archiver/src/modules/validation.ts similarity index 100% rename from yarn-project/archiver/src/archiver/validation.ts rename to yarn-project/archiver/src/modules/validation.ts diff --git a/yarn-project/archiver/src/rpc/index.ts b/yarn-project/archiver/src/rpc/index.ts deleted file mode 100644 index b42352d6eb7c..000000000000 --- a/yarn-project/archiver/src/rpc/index.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { createSafeJsonRpcClient } from '@aztec/foundation/json-rpc/client'; -import { type ArchiverApi, ArchiverApiSchema } from '@aztec/stdlib/interfaces/server'; -import { type ComponentsVersions, getVersioningResponseHandler } from '@aztec/stdlib/versioning'; -import { makeTracedFetch } from '@aztec/telemetry-client'; - -export function createArchiverClient( - url: string, - versions: Partial, - fetch = makeTracedFetch([1, 2, 3], true), -): ArchiverApi { - return createSafeJsonRpcClient(url, ArchiverApiSchema, { - namespaceMethods: 'archiver', - fetch, - onResponse: getVersioningResponseHandler(versions), - }); -} diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts b/yarn-project/archiver/src/store/block_store.ts similarity index 100% rename from yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts rename to 
yarn-project/archiver/src/store/block_store.ts diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/contract_class_store.ts b/yarn-project/archiver/src/store/contract_class_store.ts similarity index 100% rename from yarn-project/archiver/src/archiver/kv_archiver_store/contract_class_store.ts rename to yarn-project/archiver/src/store/contract_class_store.ts diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/contract_instance_store.ts b/yarn-project/archiver/src/store/contract_instance_store.ts similarity index 100% rename from yarn-project/archiver/src/archiver/kv_archiver_store/contract_instance_store.ts rename to yarn-project/archiver/src/store/contract_instance_store.ts diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.test.ts b/yarn-project/archiver/src/store/kv_archiver_store.test.ts similarity index 94% rename from yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.test.ts rename to yarn-project/archiver/src/store/kv_archiver_store.test.ts index 1b784853648b..84b31e3b9048 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.test.ts +++ b/yarn-project/archiver/src/store/kv_archiver_store.test.ts @@ -1,14 +1,7 @@ -import { - INITIAL_CHECKPOINT_NUMBER, - INITIAL_L2_BLOCK_NUM, - MAX_NOTE_HASHES_PER_TX, - NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, - PRIVATE_LOG_SIZE_IN_FIELDS, -} from '@aztec/constants'; -import { makeTuple } from '@aztec/foundation/array'; +import { INITIAL_CHECKPOINT_NUMBER, INITIAL_L2_BLOCK_NUM, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants'; import { BlockNumber, CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types'; import { Buffer16, Buffer32 } from '@aztec/foundation/buffer'; -import { times, timesParallel } from '@aztec/foundation/collection'; +import { times } from '@aztec/foundation/collection'; import { randomInt } from '@aztec/foundation/crypto/random'; import { Fr } from 
'@aztec/foundation/curves/bn254'; import { toArray } from '@aztec/foundation/iterable'; @@ -23,15 +16,14 @@ import { L2BlockNew, type ValidateCheckpointResult, } from '@aztec/stdlib/block'; -import { Checkpoint, L1PublishedData, PublishedCheckpoint, randomCheckpointInfo } from '@aztec/stdlib/checkpoint'; +import { Checkpoint, PublishedCheckpoint, randomCheckpointInfo } from '@aztec/stdlib/checkpoint'; import { type ContractClassPublic, type ContractInstanceWithAddress, SerializableContractInstance, computePublicBytecodeCommitment, } from '@aztec/stdlib/contract'; -import { ContractClassLog, LogId, PrivateLog, PublicLog, SiloedTag, Tag } from '@aztec/stdlib/logs'; -import { InboxLeaf } from '@aztec/stdlib/messaging'; +import { ContractClassLog, LogId } from '@aztec/stdlib/logs'; import { CheckpointHeader } from '@aztec/stdlib/rollup'; import { makeContractClassPublic, @@ -40,9 +32,8 @@ import { } from '@aztec/stdlib/testing'; import '@aztec/stdlib/testing/jest'; import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees'; -import { type IndexedTxEffect, PartialStateReference, StateReference, TxEffect, TxHash } from '@aztec/stdlib/tx'; +import { type IndexedTxEffect, TxHash } from '@aztec/stdlib/tx'; -import { makeInboxMessage, makeInboxMessages } from '../../test/mock_structs.js'; import { BlockArchiveNotConsistentError, BlockIndexNotSequentialError, @@ -52,8 +43,20 @@ import { InitialBlockNumberNotSequentialError, InitialCheckpointNumberNotSequentialError, } from '../errors.js'; -import { MessageStoreError } from '../kv_archiver_store/message_store.js'; +import { MessageStoreError } from '../store/message_store.js'; import type { InboxMessage } from '../structs/inbox_message.js'; +import { + makeCheckpointWithLogs, + makeInboxMessage, + makeInboxMessages, + makeInboxMessagesWithFullBlocks, + makePrivateLog, + makePrivateLogTag, + makePublicLog, + makePublicLogTag, + makePublishedCheckpoint, + makeStateForBlock, +} from '../test/mock_structs.js'; import { type 
ArchiverL1SynchPoint, KVArchiverDataStore } from './kv_archiver_store.js'; describe('KVArchiverDataStore', () => { @@ -66,33 +69,6 @@ describe('KVArchiverDataStore', () => { [5, () => publishedCheckpoints[4].checkpoint.blocks[0]], ]; - const makeBlockHash = (blockNumber: number) => `0x${blockNumber.toString(16).padStart(64, '0')}`; - - // Create a state reference with properly calculated noteHashTree.nextAvailableLeafIndex - // This is needed because the log store calculates dataStartIndexForBlock as: - // noteHashTree.nextAvailableLeafIndex - txEffects.length * MAX_NOTE_HASHES_PER_TX - // If nextAvailableLeafIndex is too small (random values 0-1000), this becomes negative - const makeStateForBlock = (blockNumber: number, txsPerBlock: number): StateReference => { - // Ensure nextAvailableLeafIndex is large enough for all blocks up to this point - const noteHashIndex = blockNumber * txsPerBlock * MAX_NOTE_HASHES_PER_TX; - return new StateReference( - AppendOnlyTreeSnapshot.random(), - new PartialStateReference( - new AppendOnlyTreeSnapshot(Fr.random(), noteHashIndex), - AppendOnlyTreeSnapshot.random(), - AppendOnlyTreeSnapshot.random(), - ), - ); - }; - - const makePublishedCheckpoint = (checkpoint: Checkpoint, l1BlockNumber: number): PublishedCheckpoint => { - return new PublishedCheckpoint( - checkpoint, - new L1PublishedData(BigInt(l1BlockNumber), BigInt(l1BlockNumber * 1000), makeBlockHash(l1BlockNumber)), - times(3, CommitteeAttestation.random), - ); - }; - const expectCheckpointedBlockEquals = ( actual: CheckpointedL2Block, expectedBlock: L2BlockNew, @@ -1798,22 +1774,6 @@ describe('KVArchiverDataStore', () => { expect(await store.getTotalL1ToL2MessageCount()).toEqual(BigInt(msgs.length)); }; - const makeInboxMessagesWithFullBlocks = ( - blockCount: number, - opts: { initialCheckpointNumber?: CheckpointNumber } = {}, - ) => - makeInboxMessages(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * blockCount, { - overrideFn: (msg, i) => { - const checkpointNumber = 
CheckpointNumber( - (opts.initialCheckpointNumber ?? initialCheckpointNumber) + - Math.floor(i / NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP), - ); - const index = - InboxLeaf.smallestIndexForCheckpoint(checkpointNumber) + BigInt(i % NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); - return { ...msg, checkpointNumber, index }; - }, - }); - it('stores first message ever', async () => { const msg = makeInboxMessage(Buffer16.ZERO, { index: 0n, checkpointNumber: CheckpointNumber(1) }); await store.addL1ToL2Messages([msg]); @@ -2195,60 +2155,18 @@ describe('KVArchiverDataStore', () => { let logsCheckpoints: PublishedCheckpoint[]; - const makePrivateLogTag = (blockNumber: number, txIndex: number, logIndex: number): SiloedTag => - new SiloedTag( - blockNumber === 1 && txIndex === 0 && logIndex === 0 - ? Fr.ZERO // Shared tag - : new Fr(blockNumber * 100 + txIndex * 10 + logIndex), - ); - - const makePrivateLog = (tag: SiloedTag) => - PrivateLog.from({ - fields: makeTuple(PRIVATE_LOG_SIZE_IN_FIELDS, i => (!i ? tag.value : new Fr(tag.value.toBigInt() + BigInt(i)))), - emittedLength: PRIVATE_LOG_SIZE_IN_FIELDS, - }); - - const mockPrivateLogs = (blockNumber: number, txIndex: number) => { - return times(numPrivateLogsPerTx, (logIndex: number) => { - const tag = makePrivateLogTag(blockNumber, txIndex, logIndex); - return makePrivateLog(tag); - }); - }; - - const mockCheckpointWithLogs = async ( - blockNumber: number, - previousArchive?: AppendOnlyTreeSnapshot, - ): Promise => { - const block = await L2BlockNew.random(BlockNumber(blockNumber), { - checkpointNumber: CheckpointNumber(blockNumber), - indexWithinCheckpoint: 0, - state: makeStateForBlock(blockNumber, numTxsPerBlock), - ...(previousArchive ? 
{ lastArchive: previousArchive } : {}), - }); - block.header.globalVariables.blockNumber = BlockNumber(blockNumber); - - block.body.txEffects = await timesParallel(numTxsPerBlock, async (txIndex: number) => { - const txEffect = await TxEffect.random(); - txEffect.privateLogs = mockPrivateLogs(blockNumber, txIndex); - txEffect.publicLogs = []; // No public logs needed for private log tests - return txEffect; - }); - - const checkpoint = new Checkpoint( - AppendOnlyTreeSnapshot.random(), - CheckpointHeader.random(), - [block], - CheckpointNumber(blockNumber), - ); - return makePublishedCheckpoint(checkpoint, blockNumber); - }; - beforeEach(async () => { // Create checkpoints sequentially to chain archive roots logsCheckpoints = []; for (let i = 0; i < numBlocksForLogs; i++) { const previousArchive = i > 0 ? logsCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined; - logsCheckpoints.push(await mockCheckpointWithLogs(i + 1, previousArchive)); + logsCheckpoints.push( + await makeCheckpointWithLogs(i + 1, { + previousArchive, + numTxsPerBlock, + privateLogs: { numLogsPerTx: numPrivateLogsPerTx }, + }), + ); } await store.addCheckpoints(logsCheckpoints); @@ -2283,7 +2201,11 @@ describe('KVArchiverDataStore', () => { // Chain from the last checkpoint's archive const newBlockNumber = numBlocksForLogs + 1; const previousArchive = logsCheckpoints[logsCheckpoints.length - 1].checkpoint.blocks[0].archive; - const newCheckpoint = await mockCheckpointWithLogs(newBlockNumber, previousArchive); + const newCheckpoint = await makeCheckpointWithLogs(newBlockNumber, { + previousArchive, + numTxsPerBlock, + privateLogs: { numLogsPerTx: numPrivateLogsPerTx }, + }); const newLog = newCheckpoint.checkpoint.blocks[0].body.txEffects[1].privateLogs[1]; newLog.fields[0] = tags[0].value; newCheckpoint.checkpoint.blocks[0].body.txEffects[1].privateLogs[1] = newLog; @@ -2333,61 +2255,18 @@ describe('KVArchiverDataStore', () => { let logsCheckpoints: PublishedCheckpoint[]; - const 
makePublicLogTag = (blockNumber: number, txIndex: number, logIndex: number): Tag => - new Tag( - blockNumber === 1 && txIndex === 0 && logIndex === 0 - ? Fr.ZERO // Shared tag - : new Fr((blockNumber * 100 + txIndex * 10 + logIndex) * 123), - ); - - const makePublicLog = (tag: Tag) => - PublicLog.from({ - contractAddress: contractAddress, - // Arbitrary length - fields: new Array(10).fill(null).map((_, i) => (!i ? tag.value : new Fr(tag.value.toBigInt() + BigInt(i)))), - }); - - const mockPublicLogs = (blockNumber: number, txIndex: number) => { - return times(numPublicLogsPerTx, (logIndex: number) => { - const tag = makePublicLogTag(blockNumber, txIndex, logIndex); - return makePublicLog(tag); - }); - }; - - const mockCheckpointWithLogs = async ( - blockNumber: number, - previousArchive?: AppendOnlyTreeSnapshot, - ): Promise => { - const block = await L2BlockNew.random(BlockNumber(blockNumber), { - checkpointNumber: CheckpointNumber(blockNumber), - indexWithinCheckpoint: 0, - state: makeStateForBlock(blockNumber, numTxsPerBlock), - ...(previousArchive ? { lastArchive: previousArchive } : {}), - }); - block.header.globalVariables.blockNumber = BlockNumber(blockNumber); - - block.body.txEffects = await timesParallel(numTxsPerBlock, async (txIndex: number) => { - const txEffect = await TxEffect.random(); - txEffect.privateLogs = []; // No private logs needed for public log tests - txEffect.publicLogs = mockPublicLogs(blockNumber, txIndex); - return txEffect; - }); - - const checkpoint = new Checkpoint( - AppendOnlyTreeSnapshot.random(), - CheckpointHeader.random(), - [block], - CheckpointNumber(blockNumber), - ); - return makePublishedCheckpoint(checkpoint, blockNumber); - }; - beforeEach(async () => { // Create checkpoints sequentially to chain archive roots logsCheckpoints = []; for (let i = 0; i < numBlocksForLogs; i++) { const previousArchive = i > 0 ? 
logsCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined; - logsCheckpoints.push(await mockCheckpointWithLogs(i + 1, previousArchive)); + logsCheckpoints.push( + await makeCheckpointWithLogs(i + 1, { + previousArchive, + numTxsPerBlock, + publicLogs: { numLogsPerTx: numPublicLogsPerTx, contractAddress }, + }), + ); } await store.addCheckpoints(logsCheckpoints); @@ -2422,7 +2301,11 @@ describe('KVArchiverDataStore', () => { // Chain from the last checkpoint's archive const newBlockNumber = numBlocksForLogs + 1; const previousArchive = logsCheckpoints[logsCheckpoints.length - 1].checkpoint.blocks[0].archive; - const newCheckpoint = await mockCheckpointWithLogs(newBlockNumber, previousArchive); + const newCheckpoint = await makeCheckpointWithLogs(newBlockNumber, { + previousArchive, + numTxsPerBlock, + publicLogs: { numLogsPerTx: numPublicLogsPerTx, contractAddress }, + }); const newLog = newCheckpoint.checkpoint.blocks[0].body.txEffects[1].publicLogs[1]; newLog.fields[0] = tags[0].value; newCheckpoint.checkpoint.blocks[0].body.txEffects[1].publicLogs[1] = newLog; diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts b/yarn-project/archiver/src/store/kv_archiver_store.ts similarity index 100% rename from yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts rename to yarn-project/archiver/src/store/kv_archiver_store.ts diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts b/yarn-project/archiver/src/store/log_store.ts similarity index 100% rename from yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts rename to yarn-project/archiver/src/store/log_store.ts diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts b/yarn-project/archiver/src/store/message_store.ts similarity index 100% rename from yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts rename to yarn-project/archiver/src/store/message_store.ts diff --git 
a/yarn-project/archiver/src/archiver/structs/data_retrieval.ts b/yarn-project/archiver/src/structs/data_retrieval.ts similarity index 100% rename from yarn-project/archiver/src/archiver/structs/data_retrieval.ts rename to yarn-project/archiver/src/structs/data_retrieval.ts diff --git a/yarn-project/archiver/src/archiver/structs/inbox_message.test.ts b/yarn-project/archiver/src/structs/inbox_message.test.ts similarity index 86% rename from yarn-project/archiver/src/archiver/structs/inbox_message.test.ts rename to yarn-project/archiver/src/structs/inbox_message.test.ts index 8ee9525e5b6e..7bb089d06d3d 100644 --- a/yarn-project/archiver/src/archiver/structs/inbox_message.test.ts +++ b/yarn-project/archiver/src/structs/inbox_message.test.ts @@ -1,4 +1,4 @@ -import { makeInboxMessage } from '../../test/mock_structs.js'; +import { makeInboxMessage } from '../test/mock_structs.js'; import { deserializeInboxMessage, serializeInboxMessage } from './inbox_message.js'; describe('InboxMessage', () => { diff --git a/yarn-project/archiver/src/archiver/structs/inbox_message.ts b/yarn-project/archiver/src/structs/inbox_message.ts similarity index 100% rename from yarn-project/archiver/src/archiver/structs/inbox_message.ts rename to yarn-project/archiver/src/structs/inbox_message.ts diff --git a/yarn-project/archiver/src/archiver/structs/published.ts b/yarn-project/archiver/src/structs/published.ts similarity index 100% rename from yarn-project/archiver/src/archiver/structs/published.ts rename to yarn-project/archiver/src/structs/published.ts diff --git a/yarn-project/archiver/src/archiver/test/fake_l1_state.ts b/yarn-project/archiver/src/test/fake_l1_state.ts similarity index 100% rename from yarn-project/archiver/src/archiver/test/fake_l1_state.ts rename to yarn-project/archiver/src/test/fake_l1_state.ts diff --git a/yarn-project/archiver/src/test/index.ts b/yarn-project/archiver/src/test/index.ts index 3e22042e59e0..308541367aac 100644 --- 
a/yarn-project/archiver/src/test/index.ts +++ b/yarn-project/archiver/src/test/index.ts @@ -1,3 +1,4 @@ +export * from './mock_structs.js'; export * from './mock_l2_block_source.js'; export * from './mock_l1_to_l2_message_source.js'; export * from './mock_archiver.js'; diff --git a/yarn-project/archiver/src/test/mock_structs.ts b/yarn-project/archiver/src/test/mock_structs.ts index 24c588213636..65bcb9bfac24 100644 --- a/yarn-project/archiver/src/test/mock_structs.ts +++ b/yarn-project/archiver/src/test/mock_structs.ts @@ -1,10 +1,28 @@ -import { CheckpointNumber } from '@aztec/foundation/branded-types'; +import { + MAX_NOTE_HASHES_PER_TX, + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, + PRIVATE_LOG_SIZE_IN_FIELDS, +} from '@aztec/constants'; +import { makeTuple } from '@aztec/foundation/array'; +import { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; import { Buffer16, Buffer32 } from '@aztec/foundation/buffer'; +import { times, timesParallel } from '@aztec/foundation/collection'; import { randomBigInt, randomInt } from '@aztec/foundation/crypto/random'; +import type { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; import { Fr } from '@aztec/foundation/curves/bn254'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { AztecAddress } from '@aztec/stdlib/aztec-address'; +import { CommitteeAttestation, L2BlockNew } from '@aztec/stdlib/block'; +import { Checkpoint, L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; +import { PrivateLog, PublicLog, SiloedTag, Tag } from '@aztec/stdlib/logs'; import { InboxLeaf } from '@aztec/stdlib/messaging'; +import { orderAttestations } from '@aztec/stdlib/p2p'; +import { CheckpointHeader } from '@aztec/stdlib/rollup'; +import { makeCheckpointAttestationFromCheckpoint } from '@aztec/stdlib/testing'; +import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees'; +import { PartialStateReference, StateReference, TxEffect } from '@aztec/stdlib/tx'; -import { 
type InboxMessage, updateRollingHash } from '../archiver/structs/inbox_message.js'; +import { type InboxMessage, updateRollingHash } from '../structs/inbox_message.js'; export function makeInboxMessage( previousRollingHash = Buffer16.ZERO, @@ -48,3 +66,230 @@ export function makeInboxMessages( return messages; } + +/** Creates inbox messages distributed across multiple blocks with proper checkpoint numbering. */ +export function makeInboxMessagesWithFullBlocks( + blockCount: number, + opts: { initialCheckpointNumber?: CheckpointNumber } = {}, +): InboxMessage[] { + const { initialCheckpointNumber = CheckpointNumber(13) } = opts; + return makeInboxMessages(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * blockCount, { + overrideFn: (msg, i) => { + const checkpointNumber = CheckpointNumber( + initialCheckpointNumber + Math.floor(i / NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP), + ); + const index = + InboxLeaf.smallestIndexForCheckpoint(checkpointNumber) + BigInt(i % NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); + return { ...msg, checkpointNumber, index }; + }, + }); +} + +/** Creates a deterministic block hash from a block number. */ +export function makeBlockHash(blockNumber: number): `0x${string}` { + return `0x${blockNumber.toString(16).padStart(64, '0')}`; +} + +/** + * Creates a StateReference with properly calculated noteHashTree.nextAvailableLeafIndex. + * This ensures LogStore's dataStartIndexForBlock calculation doesn't produce negative values. + */ +export function makeStateForBlock(blockNumber: number, txsPerBlock: number): StateReference { + const noteHashIndex = blockNumber * txsPerBlock * MAX_NOTE_HASHES_PER_TX; + return new StateReference( + AppendOnlyTreeSnapshot.random(), + new PartialStateReference( + new AppendOnlyTreeSnapshot(Fr.random(), noteHashIndex), + AppendOnlyTreeSnapshot.random(), + AppendOnlyTreeSnapshot.random(), + ), + ); +} + +/** Creates L1PublishedData with deterministic values based on l1BlockNumber. 
*/ +export function makeL1PublishedData(l1BlockNumber: number): L1PublishedData { + return new L1PublishedData(BigInt(l1BlockNumber), BigInt(l1BlockNumber * 1000), makeBlockHash(l1BlockNumber)); +} + +/** Wraps a Checkpoint with L1 published data and random attestations. */ +export function makePublishedCheckpoint( + checkpoint: Checkpoint, + l1BlockNumber: number, + attestationCount = 3, +): PublishedCheckpoint { + return new PublishedCheckpoint( + checkpoint, + makeL1PublishedData(l1BlockNumber), + times(attestationCount, CommitteeAttestation.random), + ); +} + +export interface MakeChainedCheckpointsOptions { + /** Number of L2 blocks per checkpoint. Default: 1 */ + blocksPerCheckpoint?: number; + /** Number of transactions per block. Default: 4 */ + txsPerBlock?: number; + /** Starting checkpoint number. Default: CheckpointNumber(1) */ + startCheckpointNumber?: CheckpointNumber; + /** Starting block number. Default: 1 */ + startBlockNumber?: number; + /** Starting L1 block number. Default: 10 */ + startL1BlockNumber?: number; + /** Previous archive to chain from. Default: undefined */ + previousArchive?: AppendOnlyTreeSnapshot; + /** Optional function to provide per-checkpoint overrides */ + makeCheckpointOptions?: (cpNumber: CheckpointNumber) => Partial[1]>; +} + +/** + * Creates multiple checkpoints with properly chained archives. + * Each checkpoint's blocks have their lastArchive set to the previous block's archive, + * ensuring archive chain continuity for testing. 
+ */ +export async function makeChainedCheckpoints( + count: number, + options: MakeChainedCheckpointsOptions = {}, +): Promise { + const { + blocksPerCheckpoint = 1, + txsPerBlock = 4, + startCheckpointNumber = CheckpointNumber(1), + startBlockNumber = 1, + startL1BlockNumber = 10, + makeCheckpointOptions, + } = options; + + let previousArchive = options.previousArchive; + const checkpoints: PublishedCheckpoint[] = []; + + for (let i = 0; i < count; i++) { + const cpNumber = CheckpointNumber(startCheckpointNumber + i); + const blockStart = startBlockNumber + i * blocksPerCheckpoint; + const customOptions = makeCheckpointOptions?.(cpNumber) ?? {}; + + const checkpoint = await Checkpoint.random(cpNumber, { + numBlocks: blocksPerCheckpoint, + startBlockNumber: blockStart, + previousArchive, + txsPerBlock, + state: makeStateForBlock(blockStart, txsPerBlock), + txOptions: { numPublicCallsPerTx: 2, numPublicLogsPerCall: 2 }, + ...customOptions, + }); + + previousArchive = checkpoint.blocks.at(-1)!.archive; + checkpoints.push(makePublishedCheckpoint(checkpoint, startL1BlockNumber + i * 10)); + } + + return checkpoints; +} + +/** + * Creates a PublishedCheckpoint with attestations signed by the provided signers. + * Useful for testing attestation validation. + */ +export function makeSignedPublishedCheckpoint( + checkpoint: Checkpoint, + signers: Secp256k1Signer[], + committee: EthAddress[], + l1BlockNumber = 1, +): PublishedCheckpoint { + const attestations = signers.map(signer => makeCheckpointAttestationFromCheckpoint(checkpoint, signer)); + const committeeAttestations = orderAttestations(attestations, committee); + return new PublishedCheckpoint(checkpoint, makeL1PublishedData(l1BlockNumber), committeeAttestations); +} + +/** Creates a deterministic SiloedTag for private log testing. 
*/ +export function makePrivateLogTag(blockNumber: number, txIndex: number, logIndex: number): SiloedTag { + return new SiloedTag( + blockNumber === 1 && txIndex === 0 && logIndex === 0 + ? Fr.ZERO + : new Fr(blockNumber * 100 + txIndex * 10 + logIndex), + ); +} + +/** Creates a PrivateLog with fields derived from the tag. */ +export function makePrivateLog(tag: SiloedTag): PrivateLog { + return PrivateLog.from({ + fields: makeTuple(PRIVATE_LOG_SIZE_IN_FIELDS, i => (!i ? tag.value : new Fr(tag.value.toBigInt() + BigInt(i)))), + emittedLength: PRIVATE_LOG_SIZE_IN_FIELDS, + }); +} + +/** Creates multiple private logs for a transaction. */ +export function mockPrivateLogs(blockNumber: number, txIndex: number, numLogsPerTx: number): PrivateLog[] { + return times(numLogsPerTx, logIndex => { + const tag = makePrivateLogTag(blockNumber, txIndex, logIndex); + return makePrivateLog(tag); + }); +} + +/** Creates a deterministic Tag for public log testing. */ +export function makePublicLogTag(blockNumber: number, txIndex: number, logIndex: number): Tag { + return new Tag( + blockNumber === 1 && txIndex === 0 && logIndex === 0 + ? Fr.ZERO + : new Fr((blockNumber * 100 + txIndex * 10 + logIndex) * 123), + ); +} + +/** Creates a PublicLog with fields derived from the tag. */ +export function makePublicLog(tag: Tag, contractAddress: AztecAddress = AztecAddress.fromNumber(543254)): PublicLog { + return PublicLog.from({ + contractAddress, + fields: new Array(10).fill(null).map((_, i) => (!i ? tag.value : new Fr(tag.value.toBigInt() + BigInt(i)))), + }); +} + +/** Creates multiple public logs for a transaction. 
*/ +export function makePublicLogs( + blockNumber: number, + txIndex: number, + numLogsPerTx: number, + contractAddress: AztecAddress = AztecAddress.fromNumber(543254), +): PublicLog[] { + return times(numLogsPerTx, logIndex => { + const tag = makePublicLogTag(blockNumber, txIndex, logIndex); + return makePublicLog(tag, contractAddress); + }); +} + +export interface MockCheckpointWithLogsOptions { + previousArchive?: AppendOnlyTreeSnapshot; + numTxsPerBlock?: number; + privateLogs?: { numLogsPerTx: number }; + publicLogs?: { numLogsPerTx: number; contractAddress?: AztecAddress }; +} + +/** Creates a checkpoint with specified logs on each tx effect. */ +export async function makeCheckpointWithLogs( + blockNumber: number, + options: MockCheckpointWithLogsOptions = {}, +): Promise { + const { previousArchive, numTxsPerBlock = 4, privateLogs, publicLogs } = options; + + const block = await L2BlockNew.random(BlockNumber(blockNumber), { + checkpointNumber: CheckpointNumber(blockNumber), + indexWithinCheckpoint: 0, + state: makeStateForBlock(blockNumber, numTxsPerBlock), + ...(previousArchive ? { lastArchive: previousArchive } : {}), + }); + block.header.globalVariables.blockNumber = BlockNumber(blockNumber); + + block.body.txEffects = await timesParallel(numTxsPerBlock, async (txIndex: number) => { + const txEffect = await TxEffect.random(); + txEffect.privateLogs = privateLogs ? mockPrivateLogs(blockNumber, txIndex, privateLogs.numLogsPerTx) : []; + txEffect.publicLogs = publicLogs + ? 
makePublicLogs(blockNumber, txIndex, publicLogs.numLogsPerTx, publicLogs.contractAddress) + : []; + return txEffect; + }); + + const checkpoint = new Checkpoint( + AppendOnlyTreeSnapshot.random(), + CheckpointHeader.random(), + [block], + CheckpointNumber(blockNumber), + ); + return makePublishedCheckpoint(checkpoint, blockNumber); +} diff --git a/yarn-project/aztec/src/cli/cmds/start_archiver.ts b/yarn-project/aztec/src/cli/cmds/start_archiver.ts index 50ae854ca6c6..d4c95bcd7e6e 100644 --- a/yarn-project/aztec/src/cli/cmds/start_archiver.ts +++ b/yarn-project/aztec/src/cli/cmds/start_archiver.ts @@ -1,16 +1,9 @@ -import { - Archiver, - type ArchiverConfig, - KVArchiverDataStore, - archiverConfigMappings, - getArchiverConfigFromEnv, -} from '@aztec/archiver'; +import { type ArchiverConfig, archiverConfigMappings, createArchiver, getArchiverConfigFromEnv } from '@aztec/archiver'; import { createLogger } from '@aztec/aztec.js/log'; import { type BlobClientConfig, blobClientConfigMapping, createBlobClient } from '@aztec/blob-client/client'; import { getL1Config } from '@aztec/cli/config'; import type { NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config'; -import { createStore } from '@aztec/kv-store/lmdb-v2'; import { ArchiverApiSchema } from '@aztec/stdlib/interfaces/server'; import { getConfigEnvVars as getTelemetryClientConfig, initTelemetryClient } from '@aztec/telemetry-client'; @@ -47,13 +40,9 @@ export async function startArchiver( archiverConfig.l1Contracts = addresses; archiverConfig = { ...archiverConfig, ...l1Config }; - const storeLog = createLogger('archiver:lmdb'); - const store = await createStore('archiver', KVArchiverDataStore.SCHEMA_VERSION, archiverConfig, storeLog); - const archiverStore = new KVArchiverDataStore(store, archiverConfig.maxLogs); - const telemetry = await initTelemetryClient(getTelemetryClientConfig()); const blobClient = 
createBlobClient(archiverConfig, { logger: createLogger('archiver:blob-client:client') }); - const archiver = await Archiver.createAndSync(archiverConfig, archiverStore, { telemetry, blobClient }, true); + const archiver = await createArchiver(archiverConfig, { telemetry, blobClient }, { blockUntilSync: true }); services.archiver = [archiver, ArchiverApiSchema]; signalHandlers.push(archiver.stop); diff --git a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts index 4559cc48b4ce..79aada51f5d2 100644 --- a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts @@ -1,4 +1,4 @@ -import type { ArchiveSource } from '@aztec/archiver'; +import type { ArchiverDataSource } from '@aztec/archiver'; import { type AztecNodeConfig, getConfigEnvVars } from '@aztec/aztec-node'; import { AztecAddress } from '@aztec/aztec.js/addresses'; import type { L2Block } from '@aztec/aztec.js/block'; @@ -121,7 +121,7 @@ describe('L1Publisher integration', () => { let minFee: GasFees; - let blockSource: MockProxy; + let blockSource: MockProxy; let blocks: L2Block[] = []; const chainId = createEthereumChain(config.l1RpcUrls, config.l1ChainId).chainInfo.id; @@ -185,7 +185,7 @@ describe('L1Publisher integration', () => { builderDb = await NativeWorldStateService.tmp(EthAddress.fromString(rollupAddress)); blocks = []; - blockSource = mock({ + blockSource = mock({ getBlocks(from, limit, _proven) { return Promise.resolve(blocks.slice(from - 1, from - 1 + limit)); }, diff --git a/yarn-project/txe/src/state_machine/archiver.ts b/yarn-project/txe/src/state_machine/archiver.ts index f994324af0b1..dc31c667165f 100644 --- a/yarn-project/txe/src/state_machine/archiver.ts +++ b/yarn-project/txe/src/state_machine/archiver.ts @@ -1,27 +1,21 @@ -import { ArchiveSourceBase, KVArchiverDataStore, addCheckpointsWithContractData } from 
'@aztec/archiver'; +import { ArchiverDataSourceBase, ArchiverDataStoreUpdater, KVArchiverDataStore } from '@aztec/archiver'; import { GENESIS_ARCHIVE_ROOT } from '@aztec/constants'; import { CheckpointNumber, type EpochNumber, type SlotNumber } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; import type { EthAddress } from '@aztec/foundation/eth-address'; import type { AztecAsyncKVStore } from '@aztec/kv-store'; -import { - type CheckpointId, - L2Block, - type L2BlockId, - type L2TipId, - type L2Tips, - type ValidateCheckpointResult, -} from '@aztec/stdlib/block'; -import { Checkpoint, type PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; +import type { CheckpointId, L2BlockId, L2TipId, L2Tips, ValidateCheckpointResult } from '@aztec/stdlib/block'; +import type { PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; -import type { BlockHeader } from '@aztec/stdlib/tx'; /** * TXE Archiver implementation. * Provides most of the endpoints needed by the node for reading from and writing to state, * without needing any of the extra overhead that the Archiver itself requires (i.e. an L1 client). 
*/ -export class TXEArchiver extends ArchiveSourceBase { +export class TXEArchiver extends ArchiverDataSourceBase { + private readonly updater = new ArchiverDataStoreUpdater(this.store); + constructor(db: AztecAsyncKVStore) { const store = new KVArchiverDataStore(db, 9999); super(store); @@ -29,7 +23,7 @@ export class TXEArchiver extends ArchiveSourceBase { // TXE-specific method for adding checkpoints public addCheckpoints(checkpoints: PublishedCheckpoint[], result?: ValidateCheckpointResult): Promise { - return addCheckpointsWithContractData(this.store, checkpoints, result); + return this.updater.addCheckpointsWithContractData(checkpoints, result); } // Abstract method implementations @@ -94,18 +88,6 @@ export class TXEArchiver extends ArchiveSourceBase { throw new Error('TXE Archiver does not implement "getL2EpochNumber"'); } - public getCheckpointsForEpoch(_epochNumber: EpochNumber): Promise { - throw new Error('TXE Archiver does not implement "getCheckpointsForEpoch"'); - } - - public getBlocksForEpoch(_epochNumber: EpochNumber): Promise { - throw new Error('TXE Archiver does not implement "getBlocksForEpoch"'); - } - - public getBlockHeadersForEpoch(_epochNumber: EpochNumber): Promise { - throw new Error('TXE Archiver does not implement "getBlockHeadersForEpoch"'); - } - public isEpochComplete(_epochNumber: EpochNumber): Promise { throw new Error('TXE Archiver does not implement "isEpochComplete"'); }