Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
45 commits
Select commit Hold shift + click to select a range
f597ea8
feat(spartan): cl layer in spartan
Maddiaa0 Nov 20, 2024
abeb13c
fix: dirty rebase
Maddiaa0 Jan 18, 2025
76cc51d
fix: dirty merge
Maddiaa0 Jan 18, 2025
ee07f27
fix: dirty merge
Maddiaa0 Jan 18, 2025
fe5c7b9
fix
Maddiaa0 Jan 18, 2025
0f60492
fix: typo
Maddiaa0 Jan 18, 2025
cbeb8db
fix: dirty rebase
Maddiaa0 Jan 18, 2025
4f13f27
feat(spartan): cl layer in spartan
Maddiaa0 Nov 20, 2024
4b916e9
fix: dirty rebase
Maddiaa0 Jan 18, 2025
1b9a6f2
fix: dirty rebase
Maddiaa0 Jan 18, 2025
a0b46d8
feat: http client can read from consensus layer
Maddiaa0 Jan 16, 2025
0732751
commit
Maddiaa0 Jan 16, 2025
e79c0de
config
Maddiaa0 Jan 16, 2025
1878bf6
feat: encoded blobs
Maddiaa0 Jan 17, 2025
57801b3
fix: external host
Maddiaa0 Jan 17, 2025
02ef985
fix: archiver tests, document blobs
Maddiaa0 Jan 17, 2025
e060066
feat: native testnet
Maddiaa0 Jan 17, 2025
d5331b0
fix: running promise test
Maddiaa0 Jan 17, 2025
d03b3e0
fix: field reader tests
Maddiaa0 Jan 17, 2025
1ecd034
fix: encoding
Maddiaa0 Jan 17, 2025
47cb894
fix: test
Maddiaa0 Jan 17, 2025
0c16770
fix: javascript adding a string to a number
Maddiaa0 Jan 17, 2025
f203d8e
chore: kill different blob sink urls
Maddiaa0 Jan 17, 2025
b1f8add
fix: allow larger transactions
Maddiaa0 Jan 17, 2025
7781259
Merge branch 'master' into md/get-data-from-cl
Maddiaa0 Jan 21, 2025
9187177
Merge branch 'master' into md/get-data-from-cl
Maddiaa0 Jan 21, 2025
9a4a974
fix: native testnet script
Maddiaa0 Jan 21, 2025
53e391d
chore: allow l1 consensus host url to be unset
Maddiaa0 Jan 21, 2025
f8f0d39
Merge branch 'master' into md/get-data-from-cl
Maddiaa0 Jan 21, 2025
d9ac2c8
feat: add blob hashes to rollup log
Maddiaa0 Jan 22, 2025
ba4a7c3
feat: include blob hashes filter in blob sink query
Maddiaa0 Jan 22, 2025
6fe85fd
Merge branch 'master' into md/get-data-from-cl
Maddiaa0 Jan 24, 2025
02f3aa7
Merge branch 'master' into md/get-data-from-cl
Maddiaa0 Jan 24, 2025
335cda7
Merge branch 'md/get-data-from-cl' into md/include-blob-hashes
Maddiaa0 Jan 24, 2025
807165b
fix: update local blob sink client
Maddiaa0 Jan 24, 2025
d1ec5d2
tmp
Maddiaa0 Jan 25, 2025
c2c201c
fix: update readme
Maddiaa0 Jan 25, 2025
b025304
fix: remove unused collector url
Maddiaa0 Jan 25, 2025
7a79323
fix: update text
Maddiaa0 Jan 25, 2025
6e020fc
Merge branch 'md/get-data-from-cl' into md/include-blob-hashes
Maddiaa0 Jan 25, 2025
b915415
fix: update blob filtering
Maddiaa0 Jan 25, 2025
f125f4e
fix: archiver tests
Maddiaa0 Jan 27, 2025
66cbfd3
Merge branch 'master' into md/include-blob-hashes
Maddiaa0 Jan 27, 2025
72a994d
Merge branch 'master' into md/include-blob-hashes
Maddiaa0 Jan 29, 2025
e965b34
fix: await make versioned blob hash
Maddiaa0 Jan 29, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 10 additions & 10 deletions l1-contracts/src/core/Rollup.sol
Original file line number Diff line number Diff line change
Expand Up @@ -411,15 +411,15 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo
* @param _signatures - The signatures to validate
* @param _digest - The digest to validate
* @param _currentTime - The current time
* @param _blobsHash - The blobs hash for this block
* @param _blobsHashesCommitment - The blobs hash for this block
* @param _flags - The flags to validate
*/
function validateHeader(
bytes calldata _header,
Signature[] memory _signatures,
bytes32 _digest,
Timestamp _currentTime,
bytes32 _blobsHash,
bytes32 _blobsHashesCommitment,
DataStructures.ExecutionFlags memory _flags
) external view override(IRollup) {
_validateHeader(
Expand All @@ -428,7 +428,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo
_digest,
_currentTime,
getManaBaseFeeAt(_currentTime, true),
_blobsHash,
_blobsHashesCommitment,
_flags
);
}
Expand All @@ -441,7 +441,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo
external
view
override(IRollup)
returns (bytes32, bytes32)
returns (bytes32[] memory, bytes32, bytes32)
{
return ExtRollupLib.validateBlobs(_blobsInput, checkBlob);
}
Expand Down Expand Up @@ -513,7 +513,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo

// Since an invalid blob hash here would fail the consensus checks of
// the header, the `blobInput` is implicitly accepted by consensus as well.
(bytes32 blobsHash, bytes32 blobPublicInputsHash) =
(bytes32[] memory blobHashes, bytes32 blobsHashesCommitment, bytes32 blobPublicInputsHash) =
ExtRollupLib.validateBlobs(_blobInput, checkBlob);

// Decode and validate header
Expand All @@ -529,7 +529,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo
_digest: _args.digest(),
_currentTime: Timestamp.wrap(block.timestamp),
_manaBaseFee: manaBaseFee,
_blobsHash: blobsHash,
_blobsHashesCommitment: blobsHashesCommitment,
_flags: DataStructures.ExecutionFlags({ignoreDA: false, ignoreSignatures: false})
});

Expand All @@ -555,7 +555,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo
(uint256 min,) = MerkleLib.computeMinMaxPathLength(header.contentCommitment.numTxs);
OUTBOX.insert(blockNumber, header.contentCommitment.outHash, min + 1);

emit L2BlockProposed(blockNumber, _args.archive);
emit L2BlockProposed(blockNumber, _args.archive, blobHashes);

// Automatically flag the block as proven if we have cheated and set assumeProvenThroughBlockNumber.
if (blockNumber <= assumeProvenThroughBlockNumber) {
Expand Down Expand Up @@ -828,7 +828,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo
* @param _signatures - The signatures for the attestations
* @param _digest - The digest that signatures signed
* @param _currentTime - The time of execution
* @param _blobsHash - The blobs hash for this block
* @param _blobsHashesCommitment - The blobs hash for this block
* @dev - This value is provided to allow for simple simulation of future
* @param _flags - Flags specific to the execution, whether certain checks should be skipped
*/
Expand All @@ -838,7 +838,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo
bytes32 _digest,
Timestamp _currentTime,
uint256 _manaBaseFee,
bytes32 _blobsHash,
bytes32 _blobsHashesCommitment,
DataStructures.ExecutionFlags memory _flags
) internal view {
uint256 pendingBlockNumber = canPruneAtTime(_currentTime)
Expand All @@ -850,7 +850,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, ValidatorSelection, IRo
header: _header,
currentTime: _currentTime,
manaBaseFee: _manaBaseFee,
blobsHash: _blobsHash,
blobsHashesCommitment: _blobsHashesCommitment,
pendingBlockNumber: pendingBlockNumber,
flags: _flags,
version: VERSION,
Expand Down
9 changes: 7 additions & 2 deletions l1-contracts/src/core/interfaces/IRollup.sol
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,9 @@ interface ITestRollup {
}

interface IRollup {
event L2BlockProposed(uint256 indexed blockNumber, bytes32 indexed archive);
event L2BlockProposed(
uint256 indexed blockNumber, bytes32 indexed archive, bytes32[] versionedBlobHashes
);
event L2ProofVerified(uint256 indexed blockNumber, bytes32 indexed proverId);
event PrunedPending(uint256 provenBlockNumber, uint256 pendingBlockNumber);
event ProofRightClaimed(
Expand Down Expand Up @@ -171,5 +173,8 @@ interface IRollup {
bytes calldata _blobPublicInputs,
bytes calldata _aggregationObject
) external view returns (bytes32[] memory);
function validateBlobs(bytes calldata _blobsInputs) external view returns (bytes32, bytes32);
function validateBlobs(bytes calldata _blobsInputs)
external
view
returns (bytes32[] memory, bytes32, bytes32);
}
11 changes: 8 additions & 3 deletions l1-contracts/src/core/libraries/RollupLibs/BlobLib.sol
Original file line number Diff line number Diff line change
Expand Up @@ -33,12 +33,17 @@ library BlobLib {
function validateBlobs(bytes calldata _blobsInput, bool _checkBlob)
internal
view
returns (bytes32 blobsHash, bytes32 blobPublicInputsHash)
returns (
// All of the blob hashes included in this block
bytes32[] memory blobHashes,
bytes32 blobsHashesCommitment,
bytes32 blobPublicInputsHash
)
{
// We cannot input the incorrect number of blobs below, as the blobsHash
// and epoch proof verification will fail.
uint8 numBlobs = uint8(_blobsInput[0]);
bytes32[] memory blobHashes = new bytes32[](numBlobs);
blobHashes = new bytes32[](numBlobs);
bytes memory blobPublicInputs;
for (uint256 i = 0; i < numBlobs; i++) {
// Add 1 for the numBlobs prefix
Expand All @@ -59,7 +64,7 @@ library BlobLib {
// Return the hash of all z, y, and Cs, so we can use them in proof verification later
blobPublicInputsHash = sha256(blobPublicInputs);
// Hash the EVM blob hashes for the block header
blobsHash = Hash.sha256ToField(abi.encodePacked(blobHashes));
blobsHashesCommitment = Hash.sha256ToField(abi.encodePacked(blobHashes));
}

/**
Expand Down
6 changes: 5 additions & 1 deletion l1-contracts/src/core/libraries/RollupLibs/ExtRollupLib.sol
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,11 @@ library ExtRollupLib {
function validateBlobs(bytes calldata _blobsInput, bool _checkBlob)
external
view
returns (bytes32 blobsHash, bytes32 blobPublicInputsHash)
returns (
bytes32[] memory blobHashes,
bytes32 blobsHashesCommitment,
bytes32 blobPublicInputsHash
)
{
return BlobLib.validateBlobs(_blobsInput, _checkBlob);
}
Expand Down
5 changes: 3 additions & 2 deletions l1-contracts/src/core/libraries/RollupLibs/ValidationLib.sol
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ struct ValidateHeaderArgs {
Header header;
Timestamp currentTime;
uint256 manaBaseFee;
bytes32 blobsHash;
bytes32 blobsHashesCommitment;
uint256 pendingBlockNumber;
DataStructures.ExecutionFlags flags;
uint256 version;
Expand Down Expand Up @@ -76,7 +76,8 @@ library ValidationLib {

// Check if the data is available
require(
_args.flags.ignoreDA || _args.header.contentCommitment.blobsHash == _args.blobsHash,
_args.flags.ignoreDA
|| _args.header.contentCommitment.blobsHash == _args.blobsHashesCommitment,
Errors.Rollup__UnavailableTxs(_args.header.contentCommitment.blobsHash)
);

Expand Down
53 changes: 38 additions & 15 deletions yarn-project/archiver/src/archiver/archiver.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -181,6 +181,7 @@ describe('Archiver', () => {
(b.header.globalVariables.timestamp = new Fr(now + DefaultL1ContractsConfig.ethereumSlotDuration * (i + 1))),
);
const rollupTxs = await Promise.all(blocks.map(makeRollupTx));
const blobHashes = await Promise.all(blocks.map(makeVersionedBlobHash));

publicClient.getBlockNumber.mockResolvedValueOnce(2500n).mockResolvedValueOnce(2600n).mockResolvedValueOnce(2700n);

Expand All @@ -201,14 +202,14 @@ describe('Archiver', () => {

makeMessageSentEvent(98n, 1n, 0n);
makeMessageSentEvent(99n, 1n, 1n);
makeL2BlockProposedEvent(101n, 1n, blocks[0].archive.root.toString());
makeL2BlockProposedEvent(101n, 1n, blocks[0].archive.root.toString(), [blobHashes[0]]);

makeMessageSentEvent(2504n, 2n, 0n);
makeMessageSentEvent(2505n, 2n, 1n);
makeMessageSentEvent(2505n, 2n, 2n);
makeMessageSentEvent(2506n, 3n, 1n);
makeL2BlockProposedEvent(2510n, 2n, blocks[1].archive.root.toString());
makeL2BlockProposedEvent(2520n, 3n, blocks[2].archive.root.toString());
makeL2BlockProposedEvent(2510n, 2n, blocks[1].archive.root.toString(), [blobHashes[1]]);
makeL2BlockProposedEvent(2520n, 3n, blocks[2].archive.root.toString(), [blobHashes[2]]);
publicClient.getTransaction.mockResolvedValueOnce(rollupTxs[0]);

rollupTxs.slice(1).forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx));
Expand Down Expand Up @@ -278,21 +279,23 @@ describe('Archiver', () => {
const numL2BlocksInTest = 2;

const rollupTxs = await Promise.all(blocks.map(makeRollupTx));
const blobHashes = await Promise.all(blocks.map(makeVersionedBlobHash));

// Here we set the current L1 block number to 102. L1 to L2 messages after this should not be read.
publicClient.getBlockNumber.mockResolvedValue(102n);

const badArchive = Fr.random().toString();
const badBlobHash = Fr.random().toString();

mockRollup.read.status.mockResolvedValue([0n, GENESIS_ROOT, 2n, blocks[1].archive.root.toString(), GENESIS_ROOT]);

mockInbox.read.totalMessagesInserted.mockResolvedValueOnce(2n).mockResolvedValueOnce(2n);

makeMessageSentEvent(66n, 1n, 0n);
makeMessageSentEvent(68n, 1n, 1n);
makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString());
makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString());
makeL2BlockProposedEvent(90n, 3n, badArchive);
makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString(), [blobHashes[0]]);
makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString(), [blobHashes[1]]);
makeL2BlockProposedEvent(90n, 3n, badArchive, [badBlobHash]);

rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx));
const blobsFromBlocks = await Promise.all(blocks.map(b => makeBlobFromBlock(b)));
Expand Down Expand Up @@ -321,6 +324,7 @@ describe('Archiver', () => {
const numL2BlocksInTest = 2;

const rollupTxs = await Promise.all(blocks.map(makeRollupTx));
const blobHashes = await Promise.all(blocks.map(makeVersionedBlobHash));

publicClient.getBlockNumber.mockResolvedValueOnce(50n).mockResolvedValueOnce(100n);
mockRollup.read.status
Expand All @@ -331,8 +335,8 @@ describe('Archiver', () => {

makeMessageSentEvent(66n, 1n, 0n);
makeMessageSentEvent(68n, 1n, 1n);
makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString());
makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString());
makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString(), [blobHashes[0]]);
makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString(), [blobHashes[1]]);

rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx));
const blobsFromBlocks = await Promise.all(blocks.map(b => makeBlobFromBlock(b)));
Expand All @@ -358,6 +362,7 @@ describe('Archiver', () => {
const numL2BlocksInTest = 2;

const rollupTxs = await Promise.all(blocks.map(makeRollupTx));
const blobHashes = await Promise.all(blocks.map(makeVersionedBlobHash));

publicClient.getBlockNumber.mockResolvedValueOnce(50n).mockResolvedValueOnce(100n).mockResolvedValueOnce(150n);

Expand All @@ -381,8 +386,8 @@ describe('Archiver', () => {

makeMessageSentEvent(66n, 1n, 0n);
makeMessageSentEvent(68n, 1n, 1n);
makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString());
makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString());
makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString(), [blobHashes[0]]);
makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString(), [blobHashes[1]]);

rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx));
const blobsFromBlocks = await Promise.all(blocks.map(b => makeBlobFromBlock(b)));
Expand Down Expand Up @@ -427,11 +432,12 @@ describe('Archiver', () => {
const l2Block = blocks[0];
l2Block.header.globalVariables.slotNumber = new Fr(notLastL2SlotInEpoch);
blocks = [l2Block];
const blobHashes = [await makeVersionedBlobHash(l2Block)];

const rollupTxs = await Promise.all(blocks.map(makeRollupTx));
publicClient.getBlockNumber.mockResolvedValueOnce(l1BlockForL2Block);
mockRollup.read.status.mockResolvedValueOnce([0n, GENESIS_ROOT, 1n, l2Block.archive.root.toString(), GENESIS_ROOT]);
makeL2BlockProposedEvent(l1BlockForL2Block, 1n, l2Block.archive.root.toString());
makeL2BlockProposedEvent(l1BlockForL2Block, 1n, l2Block.archive.root.toString(), blobHashes);
rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx));
const blobsFromBlocks = await Promise.all(blocks.map(b => makeBlobFromBlock(b)));
blobsFromBlocks.forEach(blob => blobSinkClient.getBlobSidecar.mockResolvedValueOnce([blob]));
Expand Down Expand Up @@ -460,11 +466,12 @@ describe('Archiver', () => {
const l2Block = blocks[0];
l2Block.header.globalVariables.slotNumber = new Fr(lastL2SlotInEpoch);
blocks = [l2Block];
const blobHashes = [await makeVersionedBlobHash(l2Block)];

const rollupTxs = await Promise.all(blocks.map(makeRollupTx));
publicClient.getBlockNumber.mockResolvedValueOnce(l1BlockForL2Block);
mockRollup.read.status.mockResolvedValueOnce([0n, GENESIS_ROOT, 1n, l2Block.archive.root.toString(), GENESIS_ROOT]);
makeL2BlockProposedEvent(l1BlockForL2Block, 1n, l2Block.archive.root.toString());
makeL2BlockProposedEvent(l1BlockForL2Block, 1n, l2Block.archive.root.toString(), blobHashes);

rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx));
const blobsFromBlocks = await Promise.all(blocks.map(b => makeBlobFromBlock(b)));
Expand Down Expand Up @@ -518,12 +525,17 @@ describe('Archiver', () => {
* @param l1BlockNum - L1 block number.
* @param l2BlockNum - L2 Block number.
*/
const makeL2BlockProposedEvent = (l1BlockNum: bigint, l2BlockNum: bigint, archive: `0x${string}`) => {
const makeL2BlockProposedEvent = (
l1BlockNum: bigint,
l2BlockNum: bigint,
archive: `0x${string}`,
versionedBlobHashes: `0x${string}`[],
) => {
const log = {
blockNumber: l1BlockNum,
args: { blockNumber: l2BlockNum, archive },
args: { blockNumber: l2BlockNum, archive, versionedBlobHashes },
transactionHash: `0x${l2BlockNum}`,
} as Log<bigint, number, false, undefined, true, typeof RollupAbi, 'L2BlockProposed'>;
} as unknown as Log<bigint, number, false, undefined, true, typeof RollupAbi, 'L2BlockProposed'>;
l2BlockProposedLogs.push(log);
};

Expand Down Expand Up @@ -572,6 +584,17 @@ async function makeRollupTx(l2Block: L2Block) {
return { input } as Transaction<bigint, number>;
}

/**
 * Makes a versioned blob hash for testing purposes.
 * Commits the block body's fields into a blob and derives its EVM versioned hash.
 * @param l2Block - The L2 block whose body is encoded into the blob.
 * @returns The versioned blob hash as a 0x-prefixed hex string.
 */
async function makeVersionedBlobHash(l2Block: L2Block): Promise<`0x${string}`> {
  const blob = await Blob.fromFields(l2Block.body.toBlobFields());
  const hashHex = blob.getEthVersionedBlobHash().toString('hex');
  return `0x${hashHex}` as `0x${string}`;
}

/**
* Blob response to be returned from the blob sink based on the expected block.
* @param block - The block.
Expand Down
12 changes: 10 additions & 2 deletions yarn-project/archiver/src/archiver/data_retrieval.ts
Original file line number Diff line number Diff line change
Expand Up @@ -98,10 +98,17 @@ export async function processL2BlockProposedLogs(
const l2BlockNumber = log.args.blockNumber!;
const archive = log.args.archive!;
const archiveFromChain = await rollup.read.archiveAt([l2BlockNumber]);
const blobHashes = log.args.versionedBlobHashes!.map(blobHash => Buffer.from(blobHash.slice(2), 'hex'));

// The value from the event and contract will match only if the block is in the chain.
if (archive === archiveFromChain) {
const block = await getBlockFromRollupTx(publicClient, blobSinkClient, log.transactionHash!, l2BlockNumber);
const block = await getBlockFromRollupTx(
publicClient,
blobSinkClient,
log.transactionHash!,
blobHashes,
l2BlockNumber,
);

const l1: L1PublishedData = {
blockNumber: log.blockNumber,
Expand Down Expand Up @@ -139,6 +146,7 @@ async function getBlockFromRollupTx(
publicClient: PublicClient,
blobSinkClient: BlobSinkClientInterface,
txHash: `0x${string}`,
blobHashes: Buffer[], // WORKTODO(md): buffer32?
l2BlockNum: bigint,
): Promise<L2Block> {
const { input: data, blockHash } = await publicClient.getTransaction({ hash: txHash });
Expand Down Expand Up @@ -169,7 +177,7 @@ async function getBlockFromRollupTx(

const header = BlockHeader.fromBuffer(Buffer.from(hexToBytes(decodedArgs.header)));

const blobBodies = await blobSinkClient.getBlobSidecar(blockHash);
const blobBodies = await blobSinkClient.getBlobSidecar(blockHash, blobHashes);
if (blobBodies.length === 0) {
throw new NoBlobBodiesFoundError(Number(l2BlockNum));
}
Expand Down
Loading