diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index 82db79ada62f..bf11afbaa743 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -30,7 +30,7 @@ contract Rollup is IRollup { uint256 public immutable VERSION; AvailabilityOracle public immutable AVAILABILITY_ORACLE; - bytes32 public rollupStateHash; + bytes32 public archive; // Root of the archive tree uint256 public lastBlockTs; // Tracks the last time time was warped on L2 ("warp" is the testing cheatcode). // See https://github.com/AztecProtocol/aztec-packages/issues/1614 @@ -44,23 +44,29 @@ contract Rollup is IRollup { } /** - * @notice Process an incoming L2Block and progress the state - * @param _proof - The proof of correct execution - * @param _l2Block - The L2Block data, formatted as outlined in `Decoder.sol` + * @notice Process an incoming L2 block and progress the state + * @param _header - The L2 block header. + * @param _archive - A snapshot (root and next available leaf index) of the archive tree after the L2 block is applied + * @param _body - The L2 block body. + * @param _proof - The proof of correct execution. */ - function process(bytes memory _proof, bytes calldata _l2Block) external override(IRollup) { - _constrainGlobals(_l2Block); + function process( + bytes calldata _header, + bytes calldata _archive, + bytes calldata _body, // Note: this will be replaced with _txsHash once the separation is finished. + bytes memory _proof + ) external override(IRollup) { + // TODO: @benejsan Should we represent this values from header as a nice struct? 
+ HeaderDecoder.Header memory header = HeaderDecoder.decode(_header); - // Decode the header - (uint256 l2BlockNumber, bytes32 oldStateHash, bytes32 newStateHash) = - HeaderDecoder.decode(_l2Block[:HeaderDecoder.BLOCK_HEADER_SIZE]); + _validateHeader(header); // Check if the data is available using availability oracle (change availability oracle if you want a different DA layer) bytes32 txsHash; { // @todo @LHerskind Hack such that the node is unchanged for now. // should be removed when we have a proper block publication. - txsHash = AVAILABILITY_ORACLE.publish(_l2Block[HeaderDecoder.BLOCK_HEADER_SIZE:]); + txsHash = AVAILABILITY_ORACLE.publish(_body); } if (!AVAILABILITY_ORACLE.isAvailable(txsHash)) { @@ -70,10 +76,7 @@ contract Rollup is IRollup { // Decode the cross-chain messages (bytes32 inHash,, bytes32[] memory l1ToL2Msgs, bytes32[] memory l2ToL1Msgs) = - MessagesDecoder.decode(_l2Block[HeaderDecoder.BLOCK_HEADER_SIZE:]); - - bytes32 publicInputHash = - _computePublicInputHash(_l2Block[:HeaderDecoder.BLOCK_HEADER_SIZE], txsHash, inHash); + MessagesDecoder.decode(_body); // @todo @LHerskind Proper genesis state. If the state is empty, we allow anything for now. // TODO(#3936): Temporarily disabling this because L2Block encoding has not yet been updated. @@ -82,13 +85,14 @@ contract Rollup is IRollup { // } bytes32[] memory publicInputs = new bytes32[](1); - publicInputs[0] = publicInputHash; + publicInputs[0] = _computePublicInputHash(_header, txsHash, inHash); if (!VERIFIER.verify(_proof, publicInputs)) { revert Errors.Rollup__InvalidProof(); } - rollupStateHash = newStateHash; + // TODO: @benejsan Manually extracting the root here is ugly. TODO: Re-think how to represent archive snap. 
+ archive = bytes32(_archive[:0x20]); lastBlockTs = block.timestamp; // @todo (issue #605) handle fee collector @@ -98,24 +102,19 @@ IOutbox outbox = REGISTRY.getOutbox(); outbox.sendL1Messages(l2ToL1Msgs); - emit L2BlockProcessed(l2BlockNumber); + emit L2BlockProcessed(header.blockNumber); } - function _constrainGlobals(bytes calldata _header) internal view { - uint256 chainId = uint256(bytes32(_header[:0x20])); - uint256 version = uint256(bytes32(_header[0x20:0x40])); - uint256 ts = uint256(bytes32(_header[0x60:0x80])); - // block number already constrained by start state hash - - if (block.chainid != chainId) { - revert Errors.Rollup__InvalidChainId(chainId, block.chainid); + function _validateHeader(HeaderDecoder.Header memory header) internal view { + if (block.chainid != header.chainId) { + revert Errors.Rollup__InvalidChainId(header.chainId, block.chainid); } - if (version != VERSION) { - revert Errors.Rollup__InvalidVersion(version, VERSION); + if (header.version != VERSION) { + revert Errors.Rollup__InvalidVersion(header.version, VERSION); } - if (ts > block.timestamp) { + if (header.timestamp > block.timestamp) { revert Errors.Rollup__TimestampInFuture(); } @@ -123,9 +122,13 @@ // This will make multiple l2 blocks in the same l1 block impractical. // e.g., the first block will update timestamp which will make the second fail. 
// Could possibly allow multiple blocks if in same l1 block - if (ts < lastBlockTs) { + if (header.timestamp < lastBlockTs) { revert Errors.Rollup__TimestampTooOld(); } + + if (archive != header.lastArchive) { + revert Errors.Rollup__InvalidArchive(archive, header.lastArchive); + } } function _computePublicInputHash(bytes calldata _header, bytes32 _txsHash, bytes32 _inHash) diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index ab2fca20e181..264625275ff1 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -3,7 +3,12 @@ pragma solidity >=0.8.18; interface IRollup { - event L2BlockProcessed(uint256 indexed blockNum); + event L2BlockProcessed(uint256 indexed blockNumber); - function process(bytes memory _proof, bytes calldata _l2Block) external; + function process( + bytes calldata _header, + bytes calldata _archive, + bytes calldata _body, + bytes memory _proof + ) external; } diff --git a/l1-contracts/src/core/libraries/Errors.sol b/l1-contracts/src/core/libraries/Errors.sol index e8d6785f1833..39ec6a692bc2 100644 --- a/l1-contracts/src/core/libraries/Errors.sol +++ b/l1-contracts/src/core/libraries/Errors.sol @@ -48,7 +48,7 @@ library Errors { ); // 0x5e789f34 // Rollup - error Rollup__InvalidStateHash(bytes32 expected, bytes32 actual); // 0xa3cfaab3 + error Rollup__InvalidArchive(bytes32 expected, bytes32 actual); // 0xb682a40e error Rollup__InvalidProof(); // 0xa5b2ba17 error Rollup__InvalidChainId(uint256 expected, uint256 actual); // 0x37b5bc12 error Rollup__InvalidVersion(uint256 expected, uint256 actual); // 0x9ef30794 diff --git a/l1-contracts/src/core/libraries/decoders/HeaderDecoder.sol b/l1-contracts/src/core/libraries/decoders/HeaderDecoder.sol index f2e7778e1f6c..a016eb0ab0af 100644 --- a/l1-contracts/src/core/libraries/decoders/HeaderDecoder.sol +++ b/l1-contracts/src/core/libraries/decoders/HeaderDecoder.sol @@ -20,37 +20,43 @@ import 
{Hash} from "../Hash.sol"; * * | byte start | num bytes | name * | --- | --- | --- - * | 0x0000 | 0x20 | chain-id - * | 0x0020 | 0x20 | version - * | 0x0040 | 0x20 | L2 block number - * | 0x0060 | 0x20 | L2 timestamp - * | 0x0080 | 0x20 | startNoteHashTreeSnapshot.root - * | 0x00a0 | 0x04 | startNoteHashTreeSnapshot.nextAvailableLeafIndex - * | 0x00a4 | 0x20 | startNullifierTreeSnapshot.root - * | 0x00c4 | 0x04 | startNullifierTreeSnapshot.nextAvailableLeafIndex - * | 0x00c8 | 0x20 | startContractTreeSnapshot.root - * | 0x00e8 | 0x04 | startContractTreeSnapshot.nextAvailableLeafIndex - * | 0x00ec | 0x20 | startPublicDataTreeSnapshot.root - * | 0x010c | 0x04 | startPublicDataTreeSnapshot.nextAvailableLeafIndex - * | 0x0110 | 0x20 | startL1ToL2MessageTreeSnapshot.root - * | 0x0130 | 0x04 | startL1ToL2MessageTreeSnapshot.nextAvailableLeafIndex - * | 0x0134 | 0x20 | startArchiveSnapshot.root - * | 0x0154 | 0x04 | startArchiveSnapshot.nextAvailableLeafIndex - * | 0x0158 | 0x20 | endNoteHashTreeSnapshot.root - * | 0x0178 | 0x04 | endNoteHashTreeSnapshot.nextAvailableLeafIndex - * | 0x017c | 0x20 | endNullifierTreeSnapshot.root - * | 0x019c | 0x04 | endNullifierTreeSnapshot.nextAvailableLeafIndex - * | 0x01a0 | 0x20 | endContractTreeSnapshot.root - * | 0x01c0 | 0x04 | endContractTreeSnapshot.nextAvailableLeafIndex - * | 0x01c4 | 0x20 | endPublicDataTreeSnapshot.root - * | 0x01e4 | 0x04 | endPublicDataTreeSnapshot.nextAvailableLeafIndex - * | 0x01e8 | 0x20 | endL1ToL2MessageTreeSnapshot.root - * | 0x0208 | 0x04 | endL1ToL2MessageTreeSnapshot.nextAvailableLeafIndex - * | 0x020c | 0x20 | endArchiveSnapshot.root - * | 0x022c | 0x04 | endArchiveSnapshot.nextAvailableLeafIndex + * | | | Header { + * | | | GlobalVariables { + * | 0x0000 | 0x20 | chainId + * | 0x0020 | 0x20 | version + * | 0x0040 | 0x20 | blockNumber + * | 0x0060 | 0x20 | timestamp + * | | | } + * | | | StateReference { + * | 0x0080 | 0x20 | l1ToL2MessageTree.root + * | 0x00a0 | 0x04 | 
l1ToL2MessageTree.nextAvailableLeafIndex + * | | | PartialStateReference { + * | 0x00a4 | 0x20 | noteHashTree.root + * | 0x00c4 | 0x04 | noteHashTree.nextAvailableLeafIndex + * | 0x00c8 | 0x20 | nullifierTree.root + * | 0x00e8 | 0x04 | nullifierTree.nextAvailableLeafIndex + * | 0x00ec | 0x20 | contractTree.root + * | 0x010c | 0x04 | contractTree.nextAvailableLeafIndex + * | 0x0110 | 0x20 | publicDataTree.root + * | 0x0130 | 0x04 | publicDataTree.nextAvailableLeafIndex + * | | | } + * | | | } + * | 0x0134 | 0x20 | lastArchive.root + * | 0x0154 | 0x04 | lastArchive.nextAvailableLeafIndex + * | 0x0158 | 0x20 | bodyHash + * | | | } * | --- | --- | --- */ library HeaderDecoder { + // TODO: This is only partial + struct Header { + uint256 chainId; + uint256 version; + uint256 blockNumber; + uint256 timestamp; + bytes32 lastArchive; + } + // DECODING OFFSET CONSTANTS // Where the start of trees metadata begins in the block uint256 private constant START_TREES_BLOCK_HEADER_OFFSET = 0x80; @@ -68,21 +74,21 @@ library HeaderDecoder { /** * @notice Decodes the header - * @param _header - The L2 block calldata. - * @return l2BlockNumber - The L2 block number - * @return startStateHash - The start state hash - * @return endStateHash - The end state hash + * @param _header - The header calldata. */ - function decode(bytes calldata _header) - internal - pure - returns (uint256 l2BlockNumber, bytes32 startStateHash, bytes32 endStateHash) - { - l2BlockNumber = uint256(bytes32(_header[0x40:0x60])); - // Note, for startStateHash to match the storage, the l2 block number must be new - 1. - // Only jumping 1 block at a time. 
- startStateHash = computeStateHash(l2BlockNumber - 1, START_TREES_BLOCK_HEADER_OFFSET, _header); - endStateHash = computeStateHash(l2BlockNumber, END_TREES_BLOCK_HEADER_OFFSET, _header); + function decode(bytes calldata _header) internal pure returns (Header memory) { + Header memory header; + + header.chainId = uint256(bytes32(_header[:0x20])); + header.version = uint256(bytes32(_header[0x20:0x40])); + header.blockNumber = uint256(bytes32(_header[0x40:0x60])); + header.timestamp = uint256(bytes32(_header[0x60:0x80])); + + // The rest is needed only by verifier and hence not decoded here. + + header.lastArchive = bytes32(_header[0x134:0x154]); + + return header; } /** diff --git a/l1-contracts/test/decoders/Base.sol b/l1-contracts/test/decoders/Base.sol index ee95efe7a46f..266abeabe062 100644 --- a/l1-contracts/test/decoders/Base.sol +++ b/l1-contracts/test/decoders/Base.sol @@ -24,14 +24,15 @@ contract DecoderBase is Test { } struct Data { + // TODO(benejsan): Use HeaderDecoder.Header here? 
+ uint256 chainId; + uint256 version; uint256 blockNumber; - bytes body; - bytes32 calldataHash; - bytes32 endStateHash; - bytes32 l1ToL2MessagesHash; - bytes32 publicInputsHash; - bytes32 startStateHash; uint256 timestamp; + bytes32 lastArchive; + bytes32 archive; + bytes header; + bytes body; } function load(string memory name) public view returns (Full memory) { diff --git a/l1-contracts/test/decoders/Decoder.t.sol b/l1-contracts/test/decoders/Decoder.t.sol index c846f12a29a1..94319fca5531 100644 --- a/l1-contracts/test/decoders/Decoder.t.sol +++ b/l1-contracts/test/decoders/Decoder.t.sol @@ -20,7 +20,7 @@ contract HeaderDecoderHelper { function decode(bytes calldata _header) public pure - returns (uint256 l2BlockNumber, bytes32 startStateHash, bytes32 endStateHash) + returns (HeaderDecoder.Header memory) { return HeaderDecoder.decode(_header); } @@ -78,29 +78,29 @@ contract DecoderTest is DecoderBase { function _testDecodeBlock(string memory name) public virtual { DecoderBase.Full memory data = load(name); - // Using the FULL decoder. - ( - uint256 l2BlockNumber, - bytes32 startStateHash, - bytes32 endStateHash, - bytes32 publicInputsHash, - bytes32[] memory l2ToL1Msgs, - bytes32[] memory l1ToL2Msgs - ) = helper.decode(data.block.body); - (bytes32 diffRoot, bytes32 l1ToL2MessagesHash) = - helper.computeDiffRootAndMessagesHash(data.block.body); + // // Using the FULL decoder. 
+ // ( + // uint256 l2BlockNumber, + // bytes32 startStateHash, + // bytes32 endStateHash, + // bytes32 publicInputsHash, + // bytes32[] memory l2ToL1Msgs, + // bytes32[] memory l1ToL2Msgs + // ) = helper.decode(data.block.body); + // (bytes32 diffRoot, bytes32 l1ToL2MessagesHash) = + // helper.computeDiffRootAndMessagesHash(data.block.body); // Header { - (uint256 headerL2BlockNumber, bytes32 headerStartStateHash, bytes32 headerEndStateHash) = + HeaderDecoder.Header memory header = headerHelper.decode(data.block.body); - assertEq(l2BlockNumber, data.block.blockNumber, "Invalid block number"); - assertEq(headerL2BlockNumber, data.block.blockNumber, "Invalid block number"); - assertEq(startStateHash, data.block.startStateHash, "Invalid start state hash"); - assertEq(headerStartStateHash, data.block.startStateHash, "Invalid start state hash"); - assertEq(endStateHash, data.block.endStateHash, "Invalid end state hash"); - assertEq(headerEndStateHash, data.block.endStateHash, "Invalid end state hash"); + assertEq(header.chainId, data.block.chainId, "Invalid chain Id"); + assertEq(header.version, data.block.version, "Invalid version"); + assertEq(header.blockNumber, data.block.blockNumber, "Invalid block number"); + assertEq(header.timestamp, data.block.timestamp, "Invalid timestamp"); + assertEq(header.lastArchive, data.block.lastArchive, "Invalid last archive"); + // assertEq(header.archive, data.block.archive, "Invalid archive"); } // Messages diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index d873da2de833..cacd3560a773 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -269,7 +269,7 @@ describe('Archiver', () => { function makeL2BlockProcessedEvent(l1BlockNum: bigint, l2BlockNum: bigint) { return { blockNumber: l1BlockNum, - args: { blockNum: l2BlockNum }, + args: { blockNumber: l2BlockNum }, transactionHash: 
`0x${l2BlockNum}`, } as Log; } @@ -349,8 +349,10 @@ function makeL1ToL2MessageCancelledEvents(l1BlockNum: bigint, entryKeys: string[ * @returns A fake tx with calldata that corresponds to calling process in the Rollup contract. */ function makeRollupTx(l2Block: L2Block) { + const header = toHex(l2Block.header.toBuffer()); + const archive = toHex(l2Block.archive.toBuffer()); + const body = toHex(l2Block.bodyToBuffer()); const proof = `0x`; - const block = toHex(l2Block.toBufferWithLogs()); - const input = encodeFunctionData({ abi: RollupAbi, functionName: 'process', args: [proof, block] }); + const input = encodeFunctionData({ abi: RollupAbi, functionName: 'process', args: [header, archive, body, proof] }); return { input } as Transaction; } diff --git a/yarn-project/archiver/src/archiver/eth_log_handlers.ts b/yarn-project/archiver/src/archiver/eth_log_handlers.ts index ebdd62a3e98b..0f0df679f3b6 100644 --- a/yarn-project/archiver/src/archiver/eth_log_handlers.ts +++ b/yarn-project/archiver/src/archiver/eth_log_handlers.ts @@ -76,12 +76,12 @@ export async function processBlockLogs( ): Promise { const retrievedBlocks: L2Block[] = []; for (const log of logs) { - const blockNum = log.args.blockNum; + const blockNum = log.args.blockNumber; if (blockNum !== expectedL2BlockNumber) { throw new Error('Block number mismatch. Expected: ' + expectedL2BlockNumber + ' but got: ' + blockNum + '.'); } // TODO: Fetch blocks from calldata in parallel - const newBlock = await getBlockFromCallData(publicClient, log.transactionHash!, log.args.blockNum); + const newBlock = await getBlockFromCallData(publicClient, log.transactionHash!, log.args.blockNumber); newBlock.setL1BlockNumber(log.blockNumber!); retrievedBlocks.push(newBlock); expectedL2BlockNumber++; @@ -112,8 +112,14 @@ async function getBlockFromCallData( if (functionName !== 'process') { throw new Error(`Unexpected method called ${functionName}`); } - const [, l2BlockHex] = args! 
as [Hex, Hex]; - const block = L2Block.fromBufferWithLogs(Buffer.from(hexToBytes(l2BlockHex))); + // TODO(benesjan): This is brittle and should be handled inside the L2 Block. + const [headerHex, archiveHex, bodyHex] = args! as [Hex, Hex, Hex, Hex]; + const blockBuffer = Buffer.concat([ + Buffer.from(hexToBytes(headerHex)), + Buffer.from(hexToBytes(archiveHex)), + Buffer.from(hexToBytes(bodyHex)), + ]); + const block = L2Block.fromBufferWithLogs(blockBuffer); if (BigInt(block.number) !== l2BlockNum) { throw new Error(`Block number mismatch: expected ${l2BlockNum} but got ${block.number}`); } diff --git a/yarn-project/circuit-types/src/l2_block.ts b/yarn-project/circuit-types/src/l2_block.ts index f3d570d3705d..e0ada8a4fca3 100644 --- a/yarn-project/circuit-types/src/l2_block.ts +++ b/yarn-project/circuit-types/src/l2_block.ts @@ -223,7 +223,7 @@ export class L2Block { */ newContractData: ContractData[]; /** - * The L1 to L2 messages to be inserted into the L2 toL2 message tree. + * The L1 to L2 messages to be inserted into the L1 to L2 message tree. 
*/ newL1ToL2Messages: Fr[]; /** @@ -263,19 +263,7 @@ export class L2Block { */ toBuffer() { return serializeToBuffer( - this.header.globalVariables, - // TODO(#3868) - AppendOnlyTreeSnapshot.empty(), // this.startNoteHashTreeSnapshot, - AppendOnlyTreeSnapshot.empty(), // this.startNullifierTreeSnapshot, - AppendOnlyTreeSnapshot.empty(), // this.startContractTreeSnapshot, - AppendOnlyTreeSnapshot.empty(), // this.startPublicDataTreeSnapshot, - AppendOnlyTreeSnapshot.empty(), // this.startL1ToL2MessageTreeSnapshot, - this.header.lastArchive, - this.header.state.partial.noteHashTree, - this.header.state.partial.nullifierTree, - this.header.state.partial.contractTree, - this.header.state.partial.publicDataTree, - this.header.state.l1ToL2MessageTree, + this.header, this.archive, this.newCommitments.length, this.newCommitments, @@ -308,6 +296,30 @@ export class L2Block { return serializeToBuffer(this.toBuffer(), this.newEncryptedLogs, this.newUnencryptedLogs); } + bodyToBuffer(): Buffer { + if (this.newEncryptedLogs === undefined || this.newUnencryptedLogs === undefined) { + throw new Error( + `newEncryptedLogs and newUnencryptedLogs must be defined when encoding L2BlockData (block ${this.header.globalVariables.blockNumber})`, + ); + } + + return serializeToBuffer( + this.newCommitments.length, + this.newCommitments, + this.newNullifiers.length, + this.newNullifiers, + this.newPublicDataWrites.length, + this.newPublicDataWrites, + this.newL2ToL1Msgs.length, + this.newL2ToL1Msgs, + this.newContracts.length, + this.newContracts, + this.newContractData, + this.newL1ToL2Messages.length, + this.newL1ToL2Messages, this.newEncryptedLogs, this.newUnencryptedLogs + ); + } + /** * Serializes a block without logs to a string. 
* @remarks This is used when the block is being served via JSON-RPC because the logs are expected to be served @@ -326,20 +338,8 @@ export class L2Block { */ static fromBuffer(buf: Buffer | BufferReader, blockHash?: Buffer) { const reader = BufferReader.asReader(buf); - const globalVariables = reader.readObject(GlobalVariables); - // TODO(#3938): update the encoding here - reader.readObject(AppendOnlyTreeSnapshot); // startNoteHashTreeSnapshot - reader.readObject(AppendOnlyTreeSnapshot); // startNullifierTreeSnapshot - reader.readObject(AppendOnlyTreeSnapshot); // startContractTreeSnapshot - reader.readObject(AppendOnlyTreeSnapshot); // startPublicDataTreeSnapshot - reader.readObject(AppendOnlyTreeSnapshot); // startL1ToL2MessageTreeSnapshot - const startArchiveSnapshot = reader.readObject(AppendOnlyTreeSnapshot); - const endNoteHashTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); - const endNullifierTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); - const endContractTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); - const endPublicDataTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); - const endL1ToL2MessageTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); - const endArchiveSnapshot = reader.readObject(AppendOnlyTreeSnapshot); + const header = reader.readObject(Header); + const archive = reader.readObject(AppendOnlyTreeSnapshot); const newCommitments = reader.readVector(Fr); const newNullifiers = reader.readVector(Fr); const newPublicDataWrites = reader.readVector(PublicDataWrite); @@ -349,19 +349,9 @@ export class L2Block { // TODO(sean): could an optimization of this be that it is encoded such that zeros are assumed const newL1ToL2Messages = reader.readVector(Fr); - const partial = new PartialStateReference( - endNoteHashTreeSnapshot, - endNullifierTreeSnapshot, - endContractTreeSnapshot, - endPublicDataTreeSnapshot, - ); - const state = new StateReference(endL1ToL2MessageTreeSnapshot, partial); - // TODO(#3938): 
populate bodyHash - const header = new Header(startArchiveSnapshot, [Fr.ZERO, Fr.ZERO], state, globalVariables); - return L2Block.fromFields( { - archive: endArchiveSnapshot, + archive, header, newCommitments, newNullifiers, @@ -534,6 +524,7 @@ export class L2Block { * and inside the circuit, it is part of the public inputs. * @returns The calldata hash. */ + // TODO(benesjan): Update to getBodyHash getCalldataHash() { if (this.newEncryptedLogs === undefined) { throw new Error('Encrypted logs has to be attached before calling "getCalldataHash"'); diff --git a/yarn-project/circuits.js/src/structs/global_variables.ts b/yarn-project/circuits.js/src/structs/global_variables.ts index 7fd594767725..2a569fb984c0 100644 --- a/yarn-project/circuits.js/src/structs/global_variables.ts +++ b/yarn-project/circuits.js/src/structs/global_variables.ts @@ -53,6 +53,7 @@ export class GlobalVariables { } static getFields(fields: FieldsOf) { + // Note: The order here must match the order in the HeaderDecoder solidity library. 
return [fields.chainId, fields.version, fields.blockNumber, fields.timestamp] as const; } diff --git a/yarn-project/circuits.js/src/structs/header.test.ts b/yarn-project/circuits.js/src/structs/header.test.ts new file mode 100644 index 000000000000..4cc4e60d8b5d --- /dev/null +++ b/yarn-project/circuits.js/src/structs/header.test.ts @@ -0,0 +1,12 @@ +import { makeHeader } from '../tests/factories.js'; +import { Header } from './header.js'; + +describe('Header', () => { + it(`serializes to buffer and deserializes it back`, () => { + const randomInt = Math.floor(Math.random() * 1000); + const expected = makeHeader(randomInt, undefined); + const buffer = expected.toBuffer(); + const res = Header.fromBuffer(buffer); + expect(res).toEqual(expected); + }); +}); diff --git a/yarn-project/circuits.js/src/structs/header.ts b/yarn-project/circuits.js/src/structs/header.ts index 13e518a73065..43dc5bfdab63 100644 --- a/yarn-project/circuits.js/src/structs/header.ts +++ b/yarn-project/circuits.js/src/structs/header.ts @@ -1,33 +1,38 @@ -import { Fr } from '@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; -import { NUM_FIELDS_PER_SHA256 } from '../constants.gen.js'; import { GlobalVariables } from './global_variables.js'; import { AppendOnlyTreeSnapshot } from './rollup/append_only_tree_snapshot.js'; import { StateReference } from './state_reference.js'; +export const NUM_BYTES_PER_SHA256 = 32; + /** A header of an L2 block. */ export class Header { constructor( /** Snapshot of archive before the block is applied. */ public lastArchive: AppendOnlyTreeSnapshot, /** Hash of the body of an L2 block. */ - public bodyHash: [Fr, Fr], + public bodyHash: Buffer, /** State reference. */ public state: StateReference, /** Global variables of an L2 block. 
*/ public globalVariables: GlobalVariables, - ) {} + ) { + if (bodyHash.length !== 32) { + throw new Error('Body hash buffer must be 32 bytes'); + } + } toBuffer() { - return serializeToBuffer(this.lastArchive, this.bodyHash, this.state, this.globalVariables); + // Note: The order here must match the order in the HeaderDecoder solidity library. + return serializeToBuffer(this.globalVariables, this.state, this.lastArchive, this.bodyHash); } static fromBuffer(buffer: Buffer | BufferReader): Header { const reader = BufferReader.asReader(buffer); - return new Header( - reader.readObject(AppendOnlyTreeSnapshot), - reader.readArray(NUM_FIELDS_PER_SHA256, Fr) as [Fr, Fr], - reader.readObject(StateReference), - reader.readObject(GlobalVariables), - ); + // Note: Read order must match the write order in toBuffer (globalVariables, state, lastArchive, bodyHash). + const globalVariables = reader.readObject(GlobalVariables); + const state = reader.readObject(StateReference); + const lastArchive = reader.readObject(AppendOnlyTreeSnapshot); + const bodyHash = reader.readBytes(NUM_BYTES_PER_SHA256); + return new Header(lastArchive, bodyHash, state, globalVariables); diff --git a/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts b/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts index 5a4b34df659d..2a839e6eda6e 100644 --- a/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts +++ b/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts @@ -95,23 +95,6 @@ export class RootRollupPublicInputs { return new RootRollupPublicInputs(...RootRollupPublicInputs.getFields(fields)); } - /** - * Returns the sha256 hash of the calldata. - * @returns The sha256 hash of the calldata. - */ - public sha256CalldataHash(): Buffer { - const high = this.header.bodyHash[0].toBuffer(); - const low = this.header.bodyHash[1].toBuffer(); - - const hash = Buffer.alloc(32); - for (let i = 0; i < 16; i++) { - hash[i] = high[i + 16]; - hash[i + 16] = low[i + 16]; - } - - return hash; - } - /** * Deserializes a buffer into a `RootRollupPublicInputs` object. * @param buffer - The buffer to deserialize. 
diff --git a/yarn-project/circuits.js/src/structs/state_reference.ts b/yarn-project/circuits.js/src/structs/state_reference.ts index 88b649f601bb..21c9359434f5 100644 --- a/yarn-project/circuits.js/src/structs/state_reference.ts +++ b/yarn-project/circuits.js/src/structs/state_reference.ts @@ -15,6 +15,7 @@ export class StateReference { ) {} toBuffer() { + // Note: The order here must match the order in the HeaderDecoder solidity library. return serializeToBuffer(this.l1ToL2MessageTree, this.partial); } diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index 93439eea2bf1..6586d23ceb18 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -3,6 +3,8 @@ import { AztecAddress } from '@aztec/foundation/aztec-address'; import { EthAddress } from '@aztec/foundation/eth-address'; import { numToUInt32BE } from '@aztec/foundation/serialize'; +import { randomBytes } from 'crypto'; + import { SchnorrSignature } from '../barretenberg/index.js'; import { ARCHIVE_HEIGHT, @@ -101,7 +103,7 @@ import { WitnessedPublicCallData, } from '../index.js'; import { GlobalVariables } from '../structs/global_variables.js'; -import { Header } from '../structs/header.js'; +import { Header, NUM_BYTES_PER_SHA256 } from '../structs/header.js'; /** * Creates an arbitrary side effect object with the given seed. @@ -882,7 +884,7 @@ export function makeRootRollupPublicInputs( export function makeHeader(seed = 0, globalVariables: GlobalVariables | undefined): Header { return new Header( makeAppendOnlyTreeSnapshot(seed + 0x100), - [new Fr(5n), new Fr(6n)], + randomBytes(NUM_BYTES_PER_SHA256), makeStateReference(seed + 0x200), globalVariables ?? 
makeGlobalVariables((seed += 0x100)), ); diff --git a/yarn-project/end-to-end/src/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/integration_l1_publisher.test.ts index a4f2156ad331..3af1bfd9e9f1 100644 --- a/yarn-project/end-to-end/src/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/integration_l1_publisher.test.ts @@ -264,14 +264,21 @@ describe('L1Publisher integration', () => { l2ToL1Messages: block.newL2ToL1Msgs.map(m => `0x${m.toBuffer().toString('hex').padStart(64, '0')}`), }, block: { + // The json formatting in forge is a bit brittle, so we convert Fr to a number in the few values below. + // This should not be a problem for testing as long as the values are not larger than u32. + chainId: Number(block.header.globalVariables.chainId.toBigInt()), + version: Number(block.header.globalVariables.version.toBigInt()), blockNumber: block.number, - startStateHash: `0x${block.getStartStateHash().toString('hex').padStart(64, '0')}`, - endStateHash: `0x${block.getEndStateHash().toString('hex').padStart(64, '0')}`, - publicInputsHash: `0x${block.getPublicInputsHash().toBuffer().toString('hex').padStart(64, '0')}`, - calldataHash: `0x${block.getCalldataHash().toString('hex').padStart(64, '0')}`, - l1ToL2MessagesHash: `0x${block.getL1ToL2MessagesHash().toString('hex').padStart(64, '0')}`, + timestamp: Number(block.header.globalVariables.timestamp.toBigInt()), + lastArchive: `0x${block.header.lastArchive.root.toBuffer().toString('hex').padStart(64, '0')}`, + header: `0x${block.header.toBuffer().toString('hex')}`, body: `0x${block.toBufferWithLogs().toString('hex')}`, - timestamp: Number(block.header.globalVariables.timestamp.toBigInt()), // The json formatting in forge is a bit brittle, so we convert to a number here. This should not be a problem for testing as longs as the timestamp is not larger than u32. 
+ + // startStateHash: `0x${block.getStartStateHash().toString('hex').padStart(64, '0')}`, + // endStateHash: `0x${block.getEndStateHash().toString('hex').padStart(64, '0')}`, + // publicInputsHash: `0x${block.getPublicInputsHash().toBuffer().toString('hex').padStart(64, '0')}`, + // calldataHash: `0x${block.getCalldataHash().toString('hex').padStart(64, '0')}`, + // l1ToL2MessagesHash: `0x${block.getL1ToL2MessagesHash().toString('hex').padStart(64, '0')}`, }, }; @@ -280,7 +287,7 @@ describe('L1Publisher integration', () => { }; it(`Build ${numberOfConsecutiveBlocks} blocks of 4 bloated txs building on each other`, async () => { - const stateInRollup_ = await rollup.read.rollupStateHash(); + const stateInRollup_ = await rollup.read.archive(); expect(hexStringToBuffer(stateInRollup_.toString())).toEqual(Buffer.alloc(32, 0)); const blockNumber = await publicClient.getBlockNumber(); @@ -351,7 +358,7 @@ describe('L1Publisher integration', () => { writeJson(`mixed_block_${i}`, block, l1ToL2Messages, l1ToL2Content, recipientAddress, deployerAccount.address); - await publisher.processL2Block(block); + await publisher.publishL2Block(block); const logs = await publicClient.getLogs({ address: rollupAddress, @@ -362,7 +369,7 @@ describe('L1Publisher integration', () => { fromBlock: blockNumber + 1n, }); expect(logs).toHaveLength(i + 1); - expect(logs[i].args.blockNum).toEqual(BigInt(i + 1)); + expect(logs[i].args.blockNumber).toEqual(BigInt(i + 1)); const ethTx = await publicClient.getTransaction({ hash: logs[i].transactionHash!, @@ -371,14 +378,19 @@ describe('L1Publisher integration', () => { const expectedData = encodeFunctionData({ abi: RollupAbi, functionName: 'process', - args: [`0x${l2Proof.toString('hex')}`, `0x${block.toBufferWithLogs().toString('hex')}`], + args: [ + `0x${block.header.toBuffer().toString('hex')}`, + `0x${block.archive.toBuffer().toString('hex')}`, + `0x${block.bodyToBuffer().toString('hex')}`, + `0x${l2Proof.toString('hex')}`, + ], }); 
expect(ethTx.input).toEqual(expectedData); const decoderArgs = [`0x${block.toBufferWithLogs().toString('hex')}`] as const; const decodedHashes = await decoderHelper.read.computeDiffRootAndMessagesHash(decoderArgs); const decodedRes = await decoderHelper.read.decode(decoderArgs); - const stateInRollup = await rollup.read.rollupStateHash(); + const stateInRollup = await rollup.read.archive(); expect(block.number).toEqual(Number(decodedRes[0])); expect(block.getStartStateHash()).toEqual(hexStringToBuffer(decodedRes[1].toString())); @@ -403,7 +415,7 @@ describe('L1Publisher integration', () => { }, 360_000); it(`Build ${numberOfConsecutiveBlocks} blocks of 4 empty txs building on each other`, async () => { - const stateInRollup_ = await rollup.read.rollupStateHash(); + const stateInRollup_ = await rollup.read.archive(); expect(hexStringToBuffer(stateInRollup_.toString())).toEqual(Buffer.alloc(32, 0)); const blockNumber = await publicClient.getBlockNumber(); @@ -427,7 +439,7 @@ describe('L1Publisher integration', () => { writeJson(`empty_block_${i}`, block, l1ToL2Messages, [], AztecAddress.ZERO, deployerAccount.address); - await publisher.processL2Block(block); + await publisher.publishL2Block(block); const logs = await publicClient.getLogs({ address: rollupAddress, @@ -438,7 +450,7 @@ describe('L1Publisher integration', () => { fromBlock: blockNumber + 1n, }); expect(logs).toHaveLength(i + 1); - expect(logs[i].args.blockNum).toEqual(BigInt(i + 1)); + expect(logs[i].args.blockNumber).toEqual(BigInt(i + 1)); const ethTx = await publicClient.getTransaction({ hash: logs[i].transactionHash!, @@ -447,14 +459,19 @@ describe('L1Publisher integration', () => { const expectedData = encodeFunctionData({ abi: RollupAbi, functionName: 'process', - args: [`0x${l2Proof.toString('hex')}`, `0x${block.toBufferWithLogs().toString('hex')}`], + args: [ + `0x${block.header.toBuffer().toString('hex')}`, + `0x${block.archive.toBuffer().toString('hex')}`, + 
`0x${block.bodyToBuffer().toString('hex')}`, + `0x${l2Proof.toString('hex')}`, + ], }); expect(ethTx.input).toEqual(expectedData); const decoderArgs = [`0x${block.toBufferWithLogs().toString('hex')}`] as const; const decodedHashes = await decoderHelper.read.computeDiffRootAndMessagesHash(decoderArgs); const decodedRes = await decoderHelper.read.decode(decoderArgs); - const stateInRollup = await rollup.read.rollupStateHash(); + const stateInRollup = await rollup.read.archive(); expect(block.number).toEqual(Number(decodedRes[0])); expect(block.getStartStateHash()).toEqual(hexStringToBuffer(decodedRes[1].toString())); diff --git a/yarn-project/foundation/src/serialize/free_funcs.test.ts b/yarn-project/foundation/src/serialize/free_funcs.test.ts new file mode 100644 index 000000000000..e3be26dd2d88 --- /dev/null +++ b/yarn-project/foundation/src/serialize/free_funcs.test.ts @@ -0,0 +1,18 @@ +import { randomBytes } from '../crypto/index.js'; +import { from2Fields, to2Fields } from './free_funcs.js'; + +describe('buffer to fields and back', () => { + it('should correctly serialize and deserialize a buffer', () => { + // Generate a random 32-byte buffer + const originalBuffer = randomBytes(32); + + // Serialize the buffer to two fields + const [field1, field2] = to2Fields(originalBuffer); + + // Deserialize the fields back to a buffer + const reconstructedBuffer = from2Fields(field1, field2); + + // Check if the original buffer and reconstructed buffer are identical + expect(reconstructedBuffer).toEqual(originalBuffer); + }); +}); diff --git a/yarn-project/foundation/src/serialize/free_funcs.ts b/yarn-project/foundation/src/serialize/free_funcs.ts index 6165657f0f90..a9f9dcdfc1ef 100644 --- a/yarn-project/foundation/src/serialize/free_funcs.ts +++ b/yarn-project/foundation/src/serialize/free_funcs.ts @@ -126,3 +126,22 @@ export function to2Fields(buf: Buffer): [Fr, Fr] { return [Fr.fromBuffer(buf1), Fr.fromBuffer(buf2)]; } + +/** + * Reconstructs the original 32 bytes of 
data from 2 field elements. + * @param field1 - First field element + * @param field2 - Second field element + * @returns 32 bytes of data as a Buffer + */ +export function from2Fields(field1: Fr, field2: Fr): Buffer { + // Convert the field elements back to buffers + const buf1 = field1.toBuffer(); + const buf2 = field2.toBuffer(); + + // Remove the padding (first 16 bytes) from each buffer + const originalPart1 = buf1.slice(16, 32); + const originalPart2 = buf2.slice(16, 32); + + // Concatenate the two parts to form the original buffer + return Buffer.concat([originalPart1, originalPart2]); +} diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr index 0fff41cc8fee..e44769c9719c 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr @@ -65,11 +65,6 @@ impl RootRollupInputs { 0 ); - let zeroed_out_snapshot = AppendOnlyTreeSnapshot { - root : 0, - next_available_leaf_index : 0 - }; - let header = Header { last_archive: left.constants.last_archive, body_hash: components::compute_calldata_hash(self.previous_rollup_data), diff --git a/yarn-project/noir-protocol-circuits/src/type_conversion.ts b/yarn-project/noir-protocol-circuits/src/type_conversion.ts index 260d5946f8e2..9b598111b4ae 100644 --- a/yarn-project/noir-protocol-circuits/src/type_conversion.ts +++ b/yarn-project/noir-protocol-circuits/src/type_conversion.ts @@ -72,7 +72,7 @@ import { TxContext, TxRequest, } from '@aztec/circuits.js'; -import { Tuple, mapTuple } from '@aztec/foundation/serialize'; +import { Tuple, from2Fields, mapTuple } from '@aztec/foundation/serialize'; import { BlockHeader as BlockHeaderNoir, @@ -137,6 +137,8 @@ import { AppendOnlyTreeSnapshot as AppendOnlyTreeSnapshotNoir, BaseOrMergeRollupPublicInputs as BaseOrMergeRollupPublicInputsNoir, ConstantRollupData as ConstantRollupDataNoir, 
+ Field, + FixedLengthArray, GlobalVariables as GlobalVariablesNoir, Header as HeaderNoir, PartialStateReference as PartialStateReferenceNoir, @@ -678,6 +680,15 @@ export function mapTupleFromNoir<T, N extends number, M>( return Array.from({ length }, (_, idx) => mapper(noirArray[idx])) as Tuple<M, N>; } +/** + * Maps a SHA256 hash from noir to the parsed type. + * @param hash - The hash as it is represented in Noir (2 fields). + * @returns The hash represented as a 32 bytes long buffer. + */ +export function mapSha256HashFromNoir(hash: FixedLengthArray<Field, 2>): Buffer { + return from2Fields(mapFieldFromNoir(hash[0]), mapFieldFromNoir(hash[1])); +} + /** * Maps optionally revealed data from noir to the parsed type. * @param optionallyRevealedData - The noir optionally revealed data. @@ -1318,7 +1329,7 @@ export function mapRootRollupPublicInputsFromNoir( export function mapHeaderFromNoir(header: HeaderNoir): Header { return new Header( mapAppendOnlyTreeSnapshotFromNoir(header.last_archive), - mapTupleFromNoir(header.body_hash, 2, mapFieldFromNoir), + mapSha256HashFromNoir(header.body_hash), mapStateReferenceFromNoir(header.state), mapGlobalVariablesFromNoir(header.global_variables), ); diff --git a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts index 5a390b36fa6f..a0e03bb5afc8 100644 --- a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts +++ b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts @@ -250,11 +250,7 @@ describe('sequencer/solo_block_builder', () => { newUnencryptedLogs, }); - const callDataHash = l2Block.getCalldataHash(); - const high = Fr.fromBuffer(callDataHash.slice(0, 16)); - const low = Fr.fromBuffer(callDataHash.slice(16, 32)); - - rootRollupOutput.header.bodyHash = [high, low]; + rootRollupOutput.header.bodyHash = l2Block.getCalldataHash(); return txs; }; diff --git
a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts index d8958a6d4091..fd74a0908162 100644 --- a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts +++ b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts @@ -144,11 +144,12 @@ export class SoloBlockBuilder implements BlockBuilder { newUnencryptedLogs, }); - if (!l2Block.getCalldataHash().equals(circuitsOutput.sha256CalldataHash())) { + // TODO: update naming here + if (!l2Block.getCalldataHash().equals(circuitsOutput.header.bodyHash)) { throw new Error( - `Calldata hash mismatch, ${l2Block.getCalldataHash().toString('hex')} == ${circuitsOutput - .sha256CalldataHash() - .toString('hex')} `, + `Calldata hash mismatch, ${l2Block + .getCalldataHash() + .toString('hex')} == ${circuitsOutput.header.bodyHash.toString('hex')} `, ); } diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 123203a59dde..8ac7eb20b84d 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -30,7 +30,7 @@ describe('L1Publisher', () => { }); it('publishes l2 block to l1', async () => { - const result = await publisher.processL2Block(l2Block); + const result = await publisher.publishL2Block(l2Block); expect(result).toEqual(true); expect(txSender.sendProcessTx).toHaveBeenCalledWith({ proof: l2Proof, inputs: l2Inputs }); @@ -48,7 +48,7 @@ describe('L1Publisher', () => { it('does not retry if sending a tx fails', async () => { txSender.sendProcessTx.mockReset().mockRejectedValueOnce(new Error()).mockResolvedValueOnce(txHash); - const result = await publisher.processL2Block(l2Block); + const result = await publisher.publishL2Block(l2Block); expect(result).toEqual(false); expect(txSender.sendProcessTx).toHaveBeenCalledTimes(1); @@ 
-57,7 +57,7 @@ describe('L1Publisher', () => { it('retries if fetching the receipt fails', async () => { txSender.getTransactionReceipt.mockReset().mockRejectedValueOnce(new Error()).mockResolvedValueOnce(txReceipt); - const result = await publisher.processL2Block(l2Block); + const result = await publisher.publishL2Block(l2Block); expect(result).toEqual(true); expect(txSender.getTransactionReceipt).toHaveBeenCalledTimes(2); @@ -66,7 +66,7 @@ describe('L1Publisher', () => { it('returns false if tx reverts', async () => { txSender.getTransactionReceipt.mockReset().mockResolvedValueOnce({ ...txReceipt, status: false }); - const result = await publisher.processL2Block(l2Block); + const result = await publisher.publishL2Block(l2Block); expect(result).toEqual(false); }); @@ -74,7 +74,7 @@ describe('L1Publisher', () => { it('returns false if interrupted', async () => { txSender.sendProcessTx.mockReset().mockImplementationOnce(() => sleep(10, txHash)); - const resultPromise = publisher.processL2Block(l2Block); + const resultPromise = publisher.publishL2Block(l2Block); publisher.interrupt(); const result = await resultPromise; diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index d30b32de7667..efa87c385424 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -82,17 +82,17 @@ export interface L1PublisherTxSender { } /** - * Encoded block data and proof ready to be pushed to the L1 contract. + * Encoded block and proof ready to be pushed to the L1 contract. */ export type L1ProcessArgs = { - /** - * Root rollup proof for an L1 block. - */ + /** The L2 block header. */ + header: Buffer; + /** A snapshot (root and next available leaf index) of the archive tree after the L2 block is applied. */ + archive: Buffer; + /** L2 block body. */ + body: Buffer; + /** Root rollup proof of the L2 block. 
*/ proof: Buffer; - /** - * Serialized L2Block data. - */ - inputs: Buffer; }; /** @@ -124,14 +124,18 @@ } /** - * Processes incoming L2 block data by publishing it to the L1 rollup contract. - * @param l2BlockData - L2 block data to publish. + * Publishes L2 block on L1. + * @param block - L2 block to publish. * @returns True once the tx has been confirmed and is successful, false on revert or interrupt, blocks otherwise. */ - public async processL2Block(l2BlockData: L2Block): Promise<boolean> { - const proof = Buffer.alloc(0); - const txData = { proof, inputs: l2BlockData.toBufferWithLogs() }; - const startStateHash = l2BlockData.getStartStateHash(); + public async publishL2Block(block: L2Block): Promise<boolean> { + const txData = { + header: block.header.toBuffer(), + archive: block.archive.toBuffer(), + body: block.bodyToBuffer(), + proof: Buffer.alloc(0), + }; + const startStateHash = block.getStartStateHash(); while (!this.interrupted) { // TODO: Remove this block number check, it's here because we don't currently have proper genesis state on the contract @@ -157,7 +161,7 @@ const stats: L1PublishStats = { ...pick(receipt, 'gasPrice', 'gasUsed', 'transactionHash'), ...pick(tx!, 'calldataGas', 'calldataSize'), - ...l2BlockData.getStats(), + ...block.getStats(), eventName: 'rollup-published-to-l1', }; this.log.info(`Published L2 block to L1 rollup contract`, stats); @@ -242,6 +246,7 @@ * @param startStateHash - The start state hash of the block we wish to publish. * @returns Boolean indicating if the hashes are equal.
*/ + // TODO(benesjan): rename this private async checkStartStateHash(startStateHash: Buffer): Promise<boolean> { const fromChain = await this.txSender.getCurrentStateHash(); const areSame = startStateHash.equals(fromChain); diff --git a/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts b/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts index 89a3037ec325..e301b6d0b632 100644 --- a/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts +++ b/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts @@ -76,7 +76,7 @@ export class ViemTxSender implements L1PublisherTxSender { } async getCurrentStateHash(): Promise<Buffer> { - const stateHash = await this.rollupContract.read.rollupStateHash(); + const stateHash = await this.rollupContract.read.archive(); return Buffer.from(stateHash.replace('0x', ''), 'hex'); } @@ -122,7 +122,12 @@ export class ViemTxSender implements L1PublisherTxSender { * @returns The hash of the mined tx. */ async sendProcessTx(encodedData: ProcessTxArgs): Promise<string | undefined> { - const args = [`0x${encodedData.proof.toString('hex')}`, `0x${encodedData.inputs.toString('hex')}`] as const; + const args = [ + `0x${encodedData.header.toString('hex')}`, + `0x${encodedData.archive.toString('hex')}`, + `0x${encodedData.body.toString('hex')}`, + `0x${encodedData.proof.toString('hex')}`, + ] as const; const gas = await this.rollupContract.estimateGas.process(args, { account: this.account, diff --git a/yarn-project/sequencer-client/src/receiver.ts b/yarn-project/sequencer-client/src/receiver.ts index f731a8f85048..4ae605ed98f6 100644 --- a/yarn-project/sequencer-client/src/receiver.ts +++ b/yarn-project/sequencer-client/src/receiver.ts @@ -9,5 +9,5 @@ export interface L2BlockReceiver { * Receive and L2 block and process it, returns true if successful. * @param l2BlockData - L2 block to process.
*/ - processL2Block(l2BlockData: L2Block): Promise<boolean>; + publishL2Block(l2BlockData: L2Block): Promise<boolean>; } diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 799ec870bd9d..6a0d0abb145f 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -93,7 +93,7 @@ describe('sequencer', () => { p2p.getTxs.mockResolvedValueOnce([tx]); blockBuilder.buildL2Block.mockResolvedValueOnce([block, proof]); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.publishL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables(chainId, version, new Fr(lastBlockNumber + 1), Fr.ZERO), ); @@ -108,7 +108,7 @@ expectedTxHashes.map(hash => expect.objectContaining({ hash })), Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.processL2Block).toHaveBeenCalledWith(block); + expect(publisher.publishL2Block).toHaveBeenCalledWith(block); }); it('builds a block out of several txs rejecting double spends', async () => { @@ -122,7 +122,7 @@ p2p.getTxs.mockResolvedValueOnce(txs); blockBuilder.buildL2Block.mockResolvedValueOnce([block, proof]); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.publishL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables(chainId, version, new Fr(lastBlockNumber + 1), Fr.ZERO), ); @@ -145,7 +145,7 @@ expectedTxHashes.map(hash => expect.objectContaining({ hash })), Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.processL2Block).toHaveBeenCalledWith(block); + expect(publisher.publishL2Block).toHaveBeenCalledWith(block); expect(p2p.deleteTxs).toHaveBeenCalledWith([await
doubleSpendTx.getTxHash()]); }); @@ -160,7 +160,7 @@ describe('sequencer', () => { p2p.getTxs.mockResolvedValueOnce(txs); blockBuilder.buildL2Block.mockResolvedValueOnce([block, proof]); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.publishL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables(chainId, version, new Fr(lastBlockNumber + 1), Fr.ZERO), ); @@ -178,7 +178,7 @@ describe('sequencer', () => { expectedTxHashes.map(hash => expect.objectContaining({ hash })), Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.processL2Block).toHaveBeenCalledWith(block); + expect(publisher.publishL2Block).toHaveBeenCalledWith(block); expect(p2p.deleteTxs).toHaveBeenCalledWith([await invalidChainTx.getTxHash()]); }); @@ -190,7 +190,7 @@ describe('sequencer', () => { p2p.getTxs.mockResolvedValueOnce([tx]); blockBuilder.buildL2Block.mockResolvedValueOnce([block, proof]); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.publishL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables(chainId, version, new Fr(lastBlockNumber + 1), Fr.ZERO), ); @@ -207,7 +207,7 @@ describe('sequencer', () => { await sequencer.work(); - expect(publisher.processL2Block).not.toHaveBeenCalled(); + expect(publisher.publishL2Block).not.toHaveBeenCalled(); }); }); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index bbb9a6af75ce..5324363e9f03 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -260,7 +260,7 @@ export class Sequencer { protected async publishL2Block(block: L2Block) { // Publishes new block to the network and awaits the tx to be mined this.state = SequencerState.PUBLISHING_BLOCK; - const publishedL2Block = await 
this.publisher.processL2Block(block); + const publishedL2Block = await this.publisher.publishL2Block(block); if (publishedL2Block) { this.log(`Successfully published block ${block.number}`); this.lastPublishedBlock = block.number;