diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index d5ae427883ea..ea8205871385 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -411,7 +411,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo * @param _signatures - The signatures to validate * @param _digest - The digest to validate * @param _currentTime - The current time - * @param _blobsHash - The blobs hash for this block + * @param _blobsHashesCommitment - The blobs hash for this block * @param _flags - The flags to validate */ function validateHeader( @@ -419,7 +419,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo Signature[] memory _signatures, bytes32 _digest, Timestamp _currentTime, - bytes32 _blobsHash, + bytes32 _blobsHashesCommitment, DataStructures.ExecutionFlags memory _flags ) external view override(IRollup) { _validateHeader( @@ -428,7 +428,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo _digest, _currentTime, getManaBaseFeeAt(_currentTime, true), - _blobsHash, + _blobsHashesCommitment, _flags ); } @@ -441,7 +441,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo external view override(IRollup) - returns (bytes32, bytes32) + returns (bytes32[] memory, bytes32, bytes32) { return ExtRollupLib.validateBlobs(_blobsInput, checkBlob); } @@ -513,7 +513,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo // Since an invalid blob hash here would fail the consensus checks of // the header, the `blobInput` is implicitly accepted by consensus as well. 
- (bytes32 blobsHash, bytes32 blobPublicInputsHash) = + (bytes32[] memory blobHashes, bytes32 blobsHashesCommitment, bytes32 blobPublicInputsHash) = ExtRollupLib.validateBlobs(_blobInput, checkBlob); // Decode and validate header @@ -529,7 +529,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo _digest: _args.digest(), _currentTime: Timestamp.wrap(block.timestamp), _manaBaseFee: manaBaseFee, - _blobsHash: blobsHash, + _blobsHashesCommitment: blobsHashesCommitment, _flags: DataStructures.ExecutionFlags({ignoreDA: false, ignoreSignatures: false}) }); @@ -555,7 +555,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo (uint256 min,) = MerkleLib.computeMinMaxPathLength(header.contentCommitment.numTxs); OUTBOX.insert(blockNumber, header.contentCommitment.outHash, min + 1); - emit L2BlockProposed(blockNumber, _args.archive); + emit L2BlockProposed(blockNumber, _args.archive, blobHashes); // Automatically flag the block as proven if we have cheated and set assumeProvenThroughBlockNumber. 
if (blockNumber <= assumeProvenThroughBlockNumber) { @@ -828,7 +828,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo * @param _signatures - The signatures for the attestations * @param _digest - The digest that signatures signed * @param _currentTime - The time of execution - * @param _blobsHash - The blobs hash for this block + * @param _blobsHashesCommitment - The blobs hash for this block * @dev - This value is provided to allow for simple simulation of future * @param _flags - Flags specific to the execution, whether certain checks should be skipped */ @@ -838,7 +838,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo bytes32 _digest, Timestamp _currentTime, uint256 _manaBaseFee, - bytes32 _blobsHash, + bytes32 _blobsHashesCommitment, DataStructures.ExecutionFlags memory _flags ) internal view { uint256 pendingBlockNumber = canPruneAtTime(_currentTime) @@ -850,7 +850,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo header: _header, currentTime: _currentTime, manaBaseFee: _manaBaseFee, - blobsHash: _blobsHash, + blobsHashesCommitment: _blobsHashesCommitment, pendingBlockNumber: pendingBlockNumber, flags: _flags, version: VERSION, diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index 44fd7963cdc5..0edcd7df1d40 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -77,7 +77,9 @@ interface ITestRollup { } interface IRollup { - event L2BlockProposed(uint256 indexed blockNumber, bytes32 indexed archive); + event L2BlockProposed( + uint256 indexed blockNumber, bytes32 indexed archive, bytes32[] versionedBlobHashes + ); event L2ProofVerified(uint256 indexed blockNumber, bytes32 indexed proverId); event PrunedPending(uint256 provenBlockNumber, uint256 pendingBlockNumber); event ProofRightClaimed( @@ -171,5 +173,8 @@ interface IRollup { bytes calldata 
_blobPublicInputs, bytes calldata _aggregationObject ) external view returns (bytes32[] memory); - function validateBlobs(bytes calldata _blobsInputs) external view returns (bytes32, bytes32); + function validateBlobs(bytes calldata _blobsInputs) + external + view + returns (bytes32[] memory, bytes32, bytes32); } diff --git a/l1-contracts/src/core/libraries/RollupLibs/BlobLib.sol b/l1-contracts/src/core/libraries/RollupLibs/BlobLib.sol index 26fa02529f68..9d51f35b12d2 100644 --- a/l1-contracts/src/core/libraries/RollupLibs/BlobLib.sol +++ b/l1-contracts/src/core/libraries/RollupLibs/BlobLib.sol @@ -33,12 +33,17 @@ library BlobLib { function validateBlobs(bytes calldata _blobsInput, bool _checkBlob) internal view - returns (bytes32 blobsHash, bytes32 blobPublicInputsHash) + returns ( + // All of the blob hashes included in this block + bytes32[] memory blobHashes, + bytes32 blobsHashesCommitment, + bytes32 blobPublicInputsHash + ) { // We cannot input the incorrect number of blobs below, as the blobsHash // and epoch proof verification will fail. 
uint8 numBlobs = uint8(_blobsInput[0]); - bytes32[] memory blobHashes = new bytes32[](numBlobs); + blobHashes = new bytes32[](numBlobs); bytes memory blobPublicInputs; for (uint256 i = 0; i < numBlobs; i++) { // Add 1 for the numBlobs prefix @@ -59,7 +64,7 @@ library BlobLib { // Return the hash of all z, y, and Cs, so we can use them in proof verification later blobPublicInputsHash = sha256(blobPublicInputs); // Hash the EVM blob hashes for the block header - blobsHash = Hash.sha256ToField(abi.encodePacked(blobHashes)); + blobsHashesCommitment = Hash.sha256ToField(abi.encodePacked(blobHashes)); } /** diff --git a/l1-contracts/src/core/libraries/RollupLibs/ExtRollupLib.sol b/l1-contracts/src/core/libraries/RollupLibs/ExtRollupLib.sol index 6100defd339e..aa8167d80298 100644 --- a/l1-contracts/src/core/libraries/RollupLibs/ExtRollupLib.sol +++ b/l1-contracts/src/core/libraries/RollupLibs/ExtRollupLib.sol @@ -107,7 +107,11 @@ library ExtRollupLib { function validateBlobs(bytes calldata _blobsInput, bool _checkBlob) external view - returns (bytes32 blobsHash, bytes32 blobPublicInputsHash) + returns ( + bytes32[] memory blobHashes, + bytes32 blobsHashesCommitment, + bytes32 blobPublicInputsHash + ) { return BlobLib.validateBlobs(_blobsInput, _checkBlob); } diff --git a/l1-contracts/src/core/libraries/RollupLibs/ValidationLib.sol b/l1-contracts/src/core/libraries/RollupLibs/ValidationLib.sol index 3f8c3f242396..7d0b2861b38b 100644 --- a/l1-contracts/src/core/libraries/RollupLibs/ValidationLib.sol +++ b/l1-contracts/src/core/libraries/RollupLibs/ValidationLib.sol @@ -16,7 +16,7 @@ struct ValidateHeaderArgs { Header header; Timestamp currentTime; uint256 manaBaseFee; - bytes32 blobsHash; + bytes32 blobsHashesCommitment; uint256 pendingBlockNumber; DataStructures.ExecutionFlags flags; uint256 version; @@ -76,7 +76,8 @@ library ValidationLib { // Check if the data is available require( - _args.flags.ignoreDA || _args.header.contentCommitment.blobsHash == _args.blobsHash, + 
_args.flags.ignoreDA + || _args.header.contentCommitment.blobsHash == _args.blobsHashesCommitment, Errors.Rollup__UnavailableTxs(_args.header.contentCommitment.blobsHash) ); diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index 5ea4eedb6e62..602cb2a1bbae 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -181,6 +181,7 @@ describe('Archiver', () => { (b.header.globalVariables.timestamp = new Fr(now + DefaultL1ContractsConfig.ethereumSlotDuration * (i + 1))), ); const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); + const blobHashes = await Promise.all(blocks.map(makeVersionedBlobHash)); publicClient.getBlockNumber.mockResolvedValueOnce(2500n).mockResolvedValueOnce(2600n).mockResolvedValueOnce(2700n); @@ -201,14 +202,14 @@ describe('Archiver', () => { makeMessageSentEvent(98n, 1n, 0n); makeMessageSentEvent(99n, 1n, 1n); - makeL2BlockProposedEvent(101n, 1n, blocks[0].archive.root.toString()); + makeL2BlockProposedEvent(101n, 1n, blocks[0].archive.root.toString(), [blobHashes[0]]); makeMessageSentEvent(2504n, 2n, 0n); makeMessageSentEvent(2505n, 2n, 1n); makeMessageSentEvent(2505n, 2n, 2n); makeMessageSentEvent(2506n, 3n, 1n); - makeL2BlockProposedEvent(2510n, 2n, blocks[1].archive.root.toString()); - makeL2BlockProposedEvent(2520n, 3n, blocks[2].archive.root.toString()); + makeL2BlockProposedEvent(2510n, 2n, blocks[1].archive.root.toString(), [blobHashes[1]]); + makeL2BlockProposedEvent(2520n, 3n, blocks[2].archive.root.toString(), [blobHashes[2]]); publicClient.getTransaction.mockResolvedValueOnce(rollupTxs[0]); rollupTxs.slice(1).forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); @@ -278,11 +279,13 @@ describe('Archiver', () => { const numL2BlocksInTest = 2; const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); + const blobHashes = await Promise.all(blocks.map(makeVersionedBlobHash)); 
// Here we set the current L1 block number to 102. L1 to L2 messages after this should not be read. publicClient.getBlockNumber.mockResolvedValue(102n); const badArchive = Fr.random().toString(); + const badBlobHash = Fr.random().toString(); mockRollup.read.status.mockResolvedValue([0n, GENESIS_ROOT, 2n, blocks[1].archive.root.toString(), GENESIS_ROOT]); @@ -290,9 +293,9 @@ describe('Archiver', () => { makeMessageSentEvent(66n, 1n, 0n); makeMessageSentEvent(68n, 1n, 1n); - makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString()); - makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString()); - makeL2BlockProposedEvent(90n, 3n, badArchive); + makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString(), [blobHashes[0]]); + makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString(), [blobHashes[1]]); + makeL2BlockProposedEvent(90n, 3n, badArchive, [badBlobHash]); rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); const blobsFromBlocks = await Promise.all(blocks.map(b => makeBlobFromBlock(b))); @@ -321,6 +324,7 @@ describe('Archiver', () => { const numL2BlocksInTest = 2; const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); + const blobHashes = await Promise.all(blocks.map(makeVersionedBlobHash)); publicClient.getBlockNumber.mockResolvedValueOnce(50n).mockResolvedValueOnce(100n); mockRollup.read.status @@ -331,8 +335,8 @@ describe('Archiver', () => { makeMessageSentEvent(66n, 1n, 0n); makeMessageSentEvent(68n, 1n, 1n); - makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString()); - makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString()); + makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString(), [blobHashes[0]]); + makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString(), [blobHashes[1]]); rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); const blobsFromBlocks = await Promise.all(blocks.map(b => makeBlobFromBlock(b))); 
@@ -358,6 +362,7 @@ describe('Archiver', () => { const numL2BlocksInTest = 2; const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); + const blobHashes = await Promise.all(blocks.map(makeVersionedBlobHash)); publicClient.getBlockNumber.mockResolvedValueOnce(50n).mockResolvedValueOnce(100n).mockResolvedValueOnce(150n); @@ -381,8 +386,8 @@ describe('Archiver', () => { makeMessageSentEvent(66n, 1n, 0n); makeMessageSentEvent(68n, 1n, 1n); - makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString()); - makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString()); + makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString(), [blobHashes[0]]); + makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString(), [blobHashes[1]]); rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); const blobsFromBlocks = await Promise.all(blocks.map(b => makeBlobFromBlock(b))); @@ -427,11 +432,12 @@ describe('Archiver', () => { const l2Block = blocks[0]; l2Block.header.globalVariables.slotNumber = new Fr(notLastL2SlotInEpoch); blocks = [l2Block]; + const blobHashes = [await makeVersionedBlobHash(l2Block)]; const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); publicClient.getBlockNumber.mockResolvedValueOnce(l1BlockForL2Block); mockRollup.read.status.mockResolvedValueOnce([0n, GENESIS_ROOT, 1n, l2Block.archive.root.toString(), GENESIS_ROOT]); - makeL2BlockProposedEvent(l1BlockForL2Block, 1n, l2Block.archive.root.toString()); + makeL2BlockProposedEvent(l1BlockForL2Block, 1n, l2Block.archive.root.toString(), blobHashes); rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); const blobsFromBlocks = await Promise.all(blocks.map(b => makeBlobFromBlock(b))); blobsFromBlocks.forEach(blob => blobSinkClient.getBlobSidecar.mockResolvedValueOnce([blob])); @@ -460,11 +466,12 @@ describe('Archiver', () => { const l2Block = blocks[0]; l2Block.header.globalVariables.slotNumber = new Fr(lastL2SlotInEpoch); 
blocks = [l2Block]; + const blobHashes = [await makeVersionedBlobHash(l2Block)]; const rollupTxs = await Promise.all(blocks.map(makeRollupTx)); publicClient.getBlockNumber.mockResolvedValueOnce(l1BlockForL2Block); mockRollup.read.status.mockResolvedValueOnce([0n, GENESIS_ROOT, 1n, l2Block.archive.root.toString(), GENESIS_ROOT]); - makeL2BlockProposedEvent(l1BlockForL2Block, 1n, l2Block.archive.root.toString()); + makeL2BlockProposedEvent(l1BlockForL2Block, 1n, l2Block.archive.root.toString(), blobHashes); rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); const blobsFromBlocks = await Promise.all(blocks.map(b => makeBlobFromBlock(b))); @@ -518,12 +525,17 @@ describe('Archiver', () => { * @param l1BlockNum - L1 block number. * @param l2BlockNum - L2 Block number. */ - const makeL2BlockProposedEvent = (l1BlockNum: bigint, l2BlockNum: bigint, archive: `0x${string}`) => { + const makeL2BlockProposedEvent = ( + l1BlockNum: bigint, + l2BlockNum: bigint, + archive: `0x${string}`, + versionedBlobHashes: `0x${string}`[], + ) => { const log = { blockNumber: l1BlockNum, - args: { blockNumber: l2BlockNum, archive }, + args: { blockNumber: l2BlockNum, archive, versionedBlobHashes }, transactionHash: `0x${l2BlockNum}`, - } as Log; + } as unknown as Log; l2BlockProposedLogs.push(log); }; @@ -572,6 +584,17 @@ async function makeRollupTx(l2Block: L2Block) { return { input } as Transaction; } +/** + * Makes a versioned blob hash for testing purposes. + * @param l2Block - The L2 block. + * @returns A versioned blob hash. + */ +async function makeVersionedBlobHash(l2Block: L2Block): Promise<`0x${string}`> { + return `0x${(await Blob.fromFields(l2Block.body.toBlobFields())) + .getEthVersionedBlobHash() + .toString('hex')}` as `0x${string}`; +} + /** * Blob response to be returned from the blob sink based on the expected block. * @param block - The block. 
diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts index 31905ade40c7..e14f2adbe0e5 100644 --- a/yarn-project/archiver/src/archiver/data_retrieval.ts +++ b/yarn-project/archiver/src/archiver/data_retrieval.ts @@ -98,10 +98,17 @@ export async function processL2BlockProposedLogs( const l2BlockNumber = log.args.blockNumber!; const archive = log.args.archive!; const archiveFromChain = await rollup.read.archiveAt([l2BlockNumber]); + const blobHashes = log.args.versionedBlobHashes!.map(blobHash => Buffer.from(blobHash.slice(2), 'hex')); // The value from the event and contract will match only if the block is in the chain. if (archive === archiveFromChain) { - const block = await getBlockFromRollupTx(publicClient, blobSinkClient, log.transactionHash!, l2BlockNumber); + const block = await getBlockFromRollupTx( + publicClient, + blobSinkClient, + log.transactionHash!, + blobHashes, + l2BlockNumber, + ); const l1: L1PublishedData = { blockNumber: log.blockNumber, @@ -139,6 +146,7 @@ async function getBlockFromRollupTx( publicClient: PublicClient, blobSinkClient: BlobSinkClientInterface, txHash: `0x${string}`, + blobHashes: Buffer[], // TODO: consider a fixed-size Buffer32 type for versioned blob hashes 
l2BlockNum: bigint, ): Promise { const { input: data, blockHash } = await publicClient.getTransaction({ hash: txHash }); @@ -169,7 +177,7 @@ async function getBlockFromRollupTx( const header = BlockHeader.fromBuffer(Buffer.from(hexToBytes(decodedArgs.header))); - const blobBodies = await blobSinkClient.getBlobSidecar(blockHash); + const blobBodies = await blobSinkClient.getBlobSidecar(blockHash, blobHashes); if (blobBodies.length === 0) { throw new NoBlobBodiesFoundError(Number(l2BlockNum)); } diff --git a/yarn-project/blob-sink/src/client/blob-sink-client-tests.ts b/yarn-project/blob-sink/src/client/blob-sink-client-tests.ts index 411b1436ffe9..b416a7b99c0f 100644 --- a/yarn-project/blob-sink/src/client/blob-sink-client-tests.ts +++ b/yarn-project/blob-sink/src/client/blob-sink-client-tests.ts @@ -25,12 +25,13 @@ export function runBlobSinkClientTests( it('should send and retrieve blobs', async () => { const blob = await makeEncodedBlob(3); + const blobHash = blob.getEthVersionedBlobHash(); const blockId = '0x1234'; const success = await client.sendBlobsToBlobSink(blockId, [blob]); expect(success).toBe(true); - const retrievedBlobs = await client.getBlobSidecar(blockId); + const retrievedBlobs = await client.getBlobSidecar(blockId, [blobHash]); expect(retrievedBlobs).toHaveLength(1); expect(retrievedBlobs[0].fieldsHash.toString()).toBe(blob.fieldsHash.toString()); expect(retrievedBlobs[0].commitment.toString('hex')).toBe(blob.commitment.toString('hex')); @@ -38,12 +39,13 @@ export function runBlobSinkClientTests( it('should handle multiple blobs', async () => { const blobs = await Promise.all([makeEncodedBlob(2), makeEncodedBlob(2), makeEncodedBlob(2)]); + const blobHashes = blobs.map(blob => blob.getEthVersionedBlobHash()); const blockId = '0x5678'; const success = await client.sendBlobsToBlobSink(blockId, blobs); expect(success).toBe(true); - const retrievedBlobs = await client.getBlobSidecar(blockId); + const retrievedBlobs = await 
client.getBlobSidecar(blockId, blobHashes); expect(retrievedBlobs).toHaveLength(3); for (let i = 0; i < blobs.length; i++) { @@ -52,7 +54,7 @@ export function runBlobSinkClientTests( } // Can request blobs by index - const retrievedBlobsByIndex = await client.getBlobSidecar(blockId, [0, 2]); + const retrievedBlobsByIndex = await client.getBlobSidecar(blockId, blobHashes, [0, 2]); expect(retrievedBlobsByIndex).toHaveLength(2); expect(retrievedBlobsByIndex[0].fieldsHash.toString()).toBe(blobs[0].fieldsHash.toString()); expect(retrievedBlobsByIndex[1].fieldsHash.toString()).toBe(blobs[2].fieldsHash.toString()); @@ -60,7 +62,7 @@ export function runBlobSinkClientTests( it('should return empty array for non-existent block', async () => { const blockId = '0xnonexistent'; - const retrievedBlobs = await client.getBlobSidecar(blockId); + const retrievedBlobs = await client.getBlobSidecar(blockId, [Buffer.from([0x0])]); expect(retrievedBlobs).toEqual([]); }); } diff --git a/yarn-project/blob-sink/src/client/http.test.ts b/yarn-project/blob-sink/src/client/http.test.ts index 5da17a7a631d..6af0e4f587bb 100644 --- a/yarn-project/blob-sink/src/client/http.test.ts +++ b/yarn-project/blob-sink/src/client/http.test.ts @@ -31,11 +31,12 @@ describe('HttpBlobSinkClient', () => { it('should handle server connection errors gracefully', async () => { const client = new HttpBlobSinkClient({ blobSinkUrl: 'http://localhost:12345' }); // Invalid port const blob = await Blob.fromFields([Fr.random()]); + const blobHash = blob.getEthVersionedBlobHash(); const success = await client.sendBlobsToBlobSink('0x1234', [blob]); expect(success).toBe(false); - const retrievedBlobs = await client.getBlobSidecar('0x1234'); + const retrievedBlobs = await client.getBlobSidecar('0x1234', [blobHash]); expect(retrievedBlobs).toEqual([]); }); @@ -43,6 +44,11 @@ describe('HttpBlobSinkClient', () => { let blobSinkServer: BlobSinkServer; let testBlob: Blob; + let testBlobHash: Buffer; + + // A blob to be ignored 
when requesting blobs + // - we do not include it's blobHash in our queries + let testBlobIgnore: Blob; let executionHostServer: http.Server | undefined = undefined; let executionHostPort: number | undefined = undefined; @@ -54,6 +60,9 @@ describe('HttpBlobSinkClient', () => { beforeEach(async () => { testBlob = await makeEncodedBlob(3); + testBlobHash = testBlob.getEthVersionedBlobHash(); + + testBlobIgnore = await makeEncodedBlob(3); }); const startExecutionHostServer = (): Promise => { @@ -88,6 +97,14 @@ describe('HttpBlobSinkClient', () => { // eslint-disable-next-line camelcase kzg_proof: `0x${testBlob.proof.toString('hex')}`, }, + { + index: 1, + blob: `0x${Buffer.from(testBlobIgnore.data).toString('hex')}`, + // eslint-disable-next-line camelcase + kzg_commitment: `0x${testBlobIgnore.commitment.toString('hex')}`, + // eslint-disable-next-line camelcase + kzg_proof: `0x${testBlobIgnore.proof.toString('hex')}`, + }, ], }), ); @@ -133,7 +150,7 @@ describe('HttpBlobSinkClient', () => { const success = await client.sendBlobsToBlobSink('0x1234', [testBlob]); expect(success).toBe(true); - const retrievedBlobs = await client.getBlobSidecar('0x1234'); + const retrievedBlobs = await client.getBlobSidecar('0x1234', [testBlobHash]); expect(retrievedBlobs).toEqual([testBlob]); // Check that the blob sink was called with the correct block hash and no index @@ -160,7 +177,7 @@ describe('HttpBlobSinkClient', () => { const success = await client.sendBlobsToBlobSink('0x1234', [testBlob]); expect(success).toBe(true); - const retrievedBlobs = await client.getBlobSidecar('0x1234'); + const retrievedBlobs = await client.getBlobSidecar('0x1234', [testBlobHash]); expect(retrievedBlobs).toEqual([testBlob]); }); }); diff --git a/yarn-project/blob-sink/src/client/http.ts b/yarn-project/blob-sink/src/client/http.ts index 7434fc5f1af0..b8d9070bb436 100644 --- a/yarn-project/blob-sink/src/client/http.ts +++ b/yarn-project/blob-sink/src/client/http.ts @@ -65,29 +65,31 @@ export class 
HttpBlobSinkClient implements BlobSinkClientInterface { * @param indices - The indices of the blobs to get * @returns The blobs */ - public async getBlobSidecar(blockHash: string, indices?: number[]): Promise { + public async getBlobSidecar(blockHash: string, blobHashes: Buffer[], indices?: number[]): Promise { + let blobs: Blob[] = []; if (this.config.blobSinkUrl) { this.log.debug('Getting blob sidecar from blob sink'); - const blobs = await this.getBlobSidecarFrom(this.config.blobSinkUrl, blockHash, indices); - if (blobs.length > 0) { - this.log.debug(`Got ${blobs.length} blobs from blob sink`); - return blobs; - } + blobs = await this.getBlobSidecarFrom(this.config.blobSinkUrl, blockHash, indices); + this.log.debug(`Got ${blobs.length} blobs from blob sink`); } - if (this.config.l1ConsensusHostUrl) { + if (blobs.length == 0 && this.config.l1ConsensusHostUrl) { // The beacon api can query by slot number, so we get that first this.log.debug('Getting slot number from consensus host'); const slotNumber = await this.getSlotNumber(blockHash); if (slotNumber) { const blobs = await this.getBlobSidecarFrom(this.config.l1ConsensusHostUrl, slotNumber, indices); + this.log.debug(`Got ${blobs.length} blobs from consensus host`); if (blobs.length > 0) { - this.log.debug(`Got ${blobs.length} blobs from consensus host`); return blobs; } } } + if (blobs.length > 0) { + return filterRelevantBlobs(blobs, blobHashes); + } + this.log.verbose('No blob sources available'); return []; } @@ -190,3 +192,16 @@ export class HttpBlobSinkClient implements BlobSinkClientInterface { return undefined; } } + +/** + * Filter blobs based on a list of blob hashes + * @param blobs + * @param blobHashes + * @returns + */ +function filterRelevantBlobs(blobs: Blob[], blobHashes: Buffer[]): Blob[] { + return blobs.filter(blob => { + const blobHash = blob.getEthVersionedBlobHash(); + return blobHashes.some(hash => hash.equals(blobHash)); + }); +} diff --git 
a/yarn-project/blob-sink/src/client/interface.ts b/yarn-project/blob-sink/src/client/interface.ts index c98e450a6bcb..ac098c0bb479 100644 --- a/yarn-project/blob-sink/src/client/interface.ts +++ b/yarn-project/blob-sink/src/client/interface.ts @@ -2,5 +2,5 @@ import { type Blob } from '@aztec/foundation/blob'; export interface BlobSinkClientInterface { sendBlobsToBlobSink(blockId: string, blobs: Blob[]): Promise; - getBlobSidecar(blockId: string, indices?: number[]): Promise; + getBlobSidecar(blockId: string, blobHashes: Buffer[], indices?: number[]): Promise; } diff --git a/yarn-project/blob-sink/src/client/local.ts b/yarn-project/blob-sink/src/client/local.ts index df18903a611f..44985bf2cb6a 100644 --- a/yarn-project/blob-sink/src/client/local.ts +++ b/yarn-project/blob-sink/src/client/local.ts @@ -19,11 +19,16 @@ export class LocalBlobSinkClient implements BlobSinkClientInterface { return true; } - public async getBlobSidecar(blockId: string, indices?: number[]): Promise { + public async getBlobSidecar(blockId: string, blobHashes: Buffer[], indices?: number[]): Promise { const blobSidecars = await this.blobStore.getBlobSidecars(blockId, indices); if (!blobSidecars) { return []; } - return blobSidecars.map(blob => blob.blob); + return blobSidecars + .filter(blob => { + const blobHash = blob.blob.getEthVersionedBlobHash(); + return blobHashes.some(hash => hash.equals(blobHash)); + }) + .map(blob => blob.blob); } }