From a0a59e9ff39a8670e4c39d6b2edffa9d3dd45cc9 Mon Sep 17 00:00:00 2001 From: benesjan Date: Fri, 12 Jan 2024 14:35:08 +0000 Subject: [PATCH 1/9] WIP --- l1-contracts/src/core/Rollup.sol | 21 +++++--- .../core/libraries/decoders/HeaderDecoder.sol | 53 +++++++++---------- yarn-project/circuit-types/src/l2_block.ts | 33 +++++++++++- .../src/structs/global_variables.ts | 1 + .../circuits.js/src/structs/header.ts | 3 +- .../src/structs/state_reference.ts | 1 + .../src/integration_l1_publisher.test.ts | 4 +- .../src/publisher/l1-publisher.test.ts | 10 ++-- .../src/publisher/l1-publisher.ts | 13 ++--- yarn-project/sequencer-client/src/receiver.ts | 2 +- .../src/sequencer/sequencer.test.ts | 16 +++--- .../src/sequencer/sequencer.ts | 2 +- 12 files changed, 100 insertions(+), 59 deletions(-) diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index 82db79ada62f..71deebe305e4 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -44,12 +44,21 @@ contract Rollup is IRollup { } /** - * @notice Process an incoming L2Block and progress the state - * @param _proof - The proof of correct execution - * @param _l2Block - The L2Block data, formatted as outlined in `Decoder.sol` + * @notice Process an incoming L2 block and progress the state + * @param _header - The L2 block header. + * @param _archive - A snapshot (root and next available leaf index) of the archive tree after the L2 block is applied + * @param _l1_to_l2_msgs - The L1 to L2 messages processed in this block + * @param _l2_to_l1_msgs - The L2 to L1 messages processed in this block + * @param _proof - The proof of correct execution. 
*/ - function process(bytes memory _proof, bytes calldata _l2Block) external override(IRollup) { - _constrainGlobals(_l2Block); + function process( + bytes calldata _header, + bytes calldata _archive, + bytes calldata _l1_to_l2_msgs, + bytes calldata _l2_to_l1_msgs, + bytes memory _proof + ) external override(IRollup) { + _validateHeader(_header); // Decode the header (uint256 l2BlockNumber, bytes32 oldStateHash, bytes32 newStateHash) = @@ -101,7 +110,7 @@ contract Rollup is IRollup { emit L2BlockProcessed(l2BlockNumber); } - function _constrainGlobals(bytes calldata _header) internal view { + function _validateHeader(bytes calldata _header) internal view { uint256 chainId = uint256(bytes32(_header[:0x20])); uint256 version = uint256(bytes32(_header[0x20:0x40])); uint256 ts = uint256(bytes32(_header[0x60:0x80])); diff --git a/l1-contracts/src/core/libraries/decoders/HeaderDecoder.sol b/l1-contracts/src/core/libraries/decoders/HeaderDecoder.sol index f2e7778e1f6c..d902a257577e 100644 --- a/l1-contracts/src/core/libraries/decoders/HeaderDecoder.sol +++ b/l1-contracts/src/core/libraries/decoders/HeaderDecoder.sol @@ -20,34 +20,31 @@ import {Hash} from "../Hash.sol"; * * | byte start | num bytes | name * | --- | --- | --- - * | 0x0000 | 0x20 | chain-id - * | 0x0020 | 0x20 | version - * | 0x0040 | 0x20 | L2 block number - * | 0x0060 | 0x20 | L2 timestamp - * | 0x0080 | 0x20 | startNoteHashTreeSnapshot.root - * | 0x00a0 | 0x04 | startNoteHashTreeSnapshot.nextAvailableLeafIndex - * | 0x00a4 | 0x20 | startNullifierTreeSnapshot.root - * | 0x00c4 | 0x04 | startNullifierTreeSnapshot.nextAvailableLeafIndex - * | 0x00c8 | 0x20 | startContractTreeSnapshot.root - * | 0x00e8 | 0x04 | startContractTreeSnapshot.nextAvailableLeafIndex - * | 0x00ec | 0x20 | startPublicDataTreeSnapshot.root - * | 0x010c | 0x04 | startPublicDataTreeSnapshot.nextAvailableLeafIndex - * | 0x0110 | 0x20 | startL1ToL2MessageTreeSnapshot.root - * | 0x0130 | 0x04 | 
startL1ToL2MessageTreeSnapshot.nextAvailableLeafIndex - * | 0x0134 | 0x20 | startArchiveSnapshot.root - * | 0x0154 | 0x04 | startArchiveSnapshot.nextAvailableLeafIndex - * | 0x0158 | 0x20 | endNoteHashTreeSnapshot.root - * | 0x0178 | 0x04 | endNoteHashTreeSnapshot.nextAvailableLeafIndex - * | 0x017c | 0x20 | endNullifierTreeSnapshot.root - * | 0x019c | 0x04 | endNullifierTreeSnapshot.nextAvailableLeafIndex - * | 0x01a0 | 0x20 | endContractTreeSnapshot.root - * | 0x01c0 | 0x04 | endContractTreeSnapshot.nextAvailableLeafIndex - * | 0x01c4 | 0x20 | endPublicDataTreeSnapshot.root - * | 0x01e4 | 0x04 | endPublicDataTreeSnapshot.nextAvailableLeafIndex - * | 0x01e8 | 0x20 | endL1ToL2MessageTreeSnapshot.root - * | 0x0208 | 0x04 | endL1ToL2MessageTreeSnapshot.nextAvailableLeafIndex - * | 0x020c | 0x20 | endArchiveSnapshot.root - * | 0x022c | 0x04 | endArchiveSnapshot.nextAvailableLeafIndex + * | | | Header { + * | | | GlobalVariables { + * | 0x0000 | 0x20 | chainId + * | 0x0020 | 0x20 | version + * | 0x0040 | 0x20 | blockNumber + * | 0x0060 | 0x20 | timestamp + * | | | } + * | | | StateReference { + * | 0x0080 | 0x20 | l1ToL2MessageTree.root + * | 0x00a0 | 0x04 | l1ToL2MessageTree.nextAvailableLeafIndex + * | | | PartialStateReference { + * | 0x00a4 | 0x20 | noteHashTree.root + * | 0x00c4 | 0x04 | noteHashTree.nextAvailableLeafIndex + * | 0x00c8 | 0x20 | nullifierTree.root + * | 0x00e8 | 0x04 | nullifierTree.nextAvailableLeafIndex + * | 0x00ec | 0x20 | contractTree.root + * | 0x010c | 0x04 | contractTree.nextAvailableLeafIndex + * | 0x0110 | 0x20 | publicDataTree.root + * | 0x0130 | 0x04 | publicDataTree.nextAvailableLeafIndex + * | | | } + * | | | } + * | 0x0134 | 0x20 | lastArchive.root + * | 0x0154 | 0x04 | lastArchive.nextAvailableLeafIndex + * | 0x0158 | 0x20 | bodyHash + * | | | } * | --- | --- | --- */ library HeaderDecoder { diff --git a/yarn-project/circuit-types/src/l2_block.ts b/yarn-project/circuit-types/src/l2_block.ts index f3d570d3705d..8d539e90785c 100644 
--- a/yarn-project/circuit-types/src/l2_block.ts +++ b/yarn-project/circuit-types/src/l2_block.ts @@ -223,7 +223,7 @@ export class L2Block { */ newContractData: ContractData[]; /** - * The L1 to L2 messages to be inserted into the L2 toL2 message tree. + * The L1 to L2 messages to be inserted into the L1 to L2 message tree. */ newL1ToL2Messages: Fr[]; /** @@ -308,6 +308,37 @@ export class L2Block { return serializeToBuffer(this.toBuffer(), this.newEncryptedLogs, this.newUnencryptedLogs); } + headerAndArchiveToBuffer() { + return serializeToBuffer( + this.header, + this.archive, + ); + } + + bodyToBuffer(): Buffer { + if (this.newEncryptedLogs === undefined || this.newUnencryptedLogs === undefined) { + throw new Error( + `newEncryptedLogs and newUnencryptedLogs must be defined when encoding L2BlockData (block ${this.header.globalVariables.blockNumber})`, + ); + } + + return serializeToBuffer( + this.newCommitments.length, + this.newCommitments, + this.newNullifiers.length, + this.newNullifiers, + this.newPublicDataWrites.length, + this.newPublicDataWrites, + this.newL2ToL1Msgs.length, + this.newL2ToL1Msgs, + this.newContracts.length, + this.newContracts, + this.newContractData, + this.newL1ToL2Messages.length, + this.newL1ToL2Messages, + ); + } + /** * Serializes a block without logs to a string. * @remarks This is used when the block is being served via JSON-RPC because the logs are expected to be served diff --git a/yarn-project/circuits.js/src/structs/global_variables.ts b/yarn-project/circuits.js/src/structs/global_variables.ts index 7fd594767725..2a569fb984c0 100644 --- a/yarn-project/circuits.js/src/structs/global_variables.ts +++ b/yarn-project/circuits.js/src/structs/global_variables.ts @@ -53,6 +53,7 @@ export class GlobalVariables { } static getFields(fields: FieldsOf) { + // Note: The order here must match the order in the HeaderDecoder solidity library. 
return [fields.chainId, fields.version, fields.blockNumber, fields.timestamp] as const; } diff --git a/yarn-project/circuits.js/src/structs/header.ts b/yarn-project/circuits.js/src/structs/header.ts index 13e518a73065..9bcd9be63d6b 100644 --- a/yarn-project/circuits.js/src/structs/header.ts +++ b/yarn-project/circuits.js/src/structs/header.ts @@ -20,7 +20,8 @@ export class Header { ) {} toBuffer() { - return serializeToBuffer(this.lastArchive, this.bodyHash, this.state, this.globalVariables); + // Note: The order here must match the order in the HeaderDecoder solidity library. + return serializeToBuffer(this.globalVariables, this.state, this.lastArchive, this.bodyHash); } static fromBuffer(buffer: Buffer | BufferReader): Header { diff --git a/yarn-project/circuits.js/src/structs/state_reference.ts b/yarn-project/circuits.js/src/structs/state_reference.ts index 88b649f601bb..21c9359434f5 100644 --- a/yarn-project/circuits.js/src/structs/state_reference.ts +++ b/yarn-project/circuits.js/src/structs/state_reference.ts @@ -15,6 +15,7 @@ export class StateReference { ) {} toBuffer() { + // Note: The order here must match the order in the HeaderDecoder solidity library. 
return serializeToBuffer(this.l1ToL2MessageTree, this.partial); } diff --git a/yarn-project/end-to-end/src/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/integration_l1_publisher.test.ts index a4f2156ad331..a179716361ce 100644 --- a/yarn-project/end-to-end/src/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/integration_l1_publisher.test.ts @@ -351,7 +351,7 @@ describe('L1Publisher integration', () => { writeJson(`mixed_block_${i}`, block, l1ToL2Messages, l1ToL2Content, recipientAddress, deployerAccount.address); - await publisher.processL2Block(block); + await publisher.publishL2Block(block); const logs = await publicClient.getLogs({ address: rollupAddress, @@ -427,7 +427,7 @@ describe('L1Publisher integration', () => { writeJson(`empty_block_${i}`, block, l1ToL2Messages, [], AztecAddress.ZERO, deployerAccount.address); - await publisher.processL2Block(block); + await publisher.publishL2Block(block); const logs = await publicClient.getLogs({ address: rollupAddress, diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 123203a59dde..8ac7eb20b84d 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -30,7 +30,7 @@ describe('L1Publisher', () => { }); it('publishes l2 block to l1', async () => { - const result = await publisher.processL2Block(l2Block); + const result = await publisher.publishL2Block(l2Block); expect(result).toEqual(true); expect(txSender.sendProcessTx).toHaveBeenCalledWith({ proof: l2Proof, inputs: l2Inputs }); @@ -48,7 +48,7 @@ describe('L1Publisher', () => { it('does not retry if sending a tx fails', async () => { txSender.sendProcessTx.mockReset().mockRejectedValueOnce(new Error()).mockResolvedValueOnce(txHash); - const result = await publisher.processL2Block(l2Block); + const result = await 
publisher.publishL2Block(l2Block); expect(result).toEqual(false); expect(txSender.sendProcessTx).toHaveBeenCalledTimes(1); @@ -57,7 +57,7 @@ describe('L1Publisher', () => { it('retries if fetching the receipt fails', async () => { txSender.getTransactionReceipt.mockReset().mockRejectedValueOnce(new Error()).mockResolvedValueOnce(txReceipt); - const result = await publisher.processL2Block(l2Block); + const result = await publisher.publishL2Block(l2Block); expect(result).toEqual(true); expect(txSender.getTransactionReceipt).toHaveBeenCalledTimes(2); @@ -66,7 +66,7 @@ describe('L1Publisher', () => { it('returns false if tx reverts', async () => { txSender.getTransactionReceipt.mockReset().mockResolvedValueOnce({ ...txReceipt, status: false }); - const result = await publisher.processL2Block(l2Block); + const result = await publisher.publishL2Block(l2Block); expect(result).toEqual(false); }); @@ -74,7 +74,7 @@ describe('L1Publisher', () => { it('returns false if interrupted', async () => { txSender.sendProcessTx.mockReset().mockImplementationOnce(() => sleep(10, txHash)); - const resultPromise = publisher.processL2Block(l2Block); + const resultPromise = publisher.publishL2Block(l2Block); publisher.interrupt(); const result = await resultPromise; diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index d30b32de7667..db6b3cdfa242 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -124,14 +124,15 @@ export class L1Publisher implements L2BlockReceiver { } /** - * Processes incoming L2 block data by publishing it to the L1 rollup contract. - * @param l2BlockData - L2 block data to publish. + * Publishes L2 block on L1. + * @param block - L2 block to publish. * @returns True once the tx has been confirmed and is successful, false on revert or interrupt, blocks otherwise. 
*/ - public async processL2Block(l2BlockData: L2Block): Promise { + public async publishL2Block(block: L2Block): Promise { const proof = Buffer.alloc(0); - const txData = { proof, inputs: l2BlockData.toBufferWithLogs() }; - const startStateHash = l2BlockData.getStartStateHash(); + + const txData = { proof, inputs: block.toBufferWithLogs() }; + const startStateHash = block.getStartStateHash(); while (!this.interrupted) { // TODO: Remove this block number check, it's here because we don't currently have proper genesis state on the contract @@ -157,7 +158,7 @@ export class L1Publisher implements L2BlockReceiver { const stats: L1PublishStats = { ...pick(receipt, 'gasPrice', 'gasUsed', 'transactionHash'), ...pick(tx!, 'calldataGas', 'calldataSize'), - ...l2BlockData.getStats(), + ...block.getStats(), eventName: 'rollup-published-to-l1', }; this.log.info(`Published L2 block to L1 rollup contract`, stats); diff --git a/yarn-project/sequencer-client/src/receiver.ts b/yarn-project/sequencer-client/src/receiver.ts index f731a8f85048..4ae605ed98f6 100644 --- a/yarn-project/sequencer-client/src/receiver.ts +++ b/yarn-project/sequencer-client/src/receiver.ts @@ -9,5 +9,5 @@ export interface L2BlockReceiver { * Receive and L2 block and process it, returns true if successful. * @param l2BlockData - L2 block to process. 
*/ - processL2Block(l2BlockData: L2Block): Promise; + publishL2Block(l2BlockData: L2Block): Promise; } diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 799ec870bd9d..6a0d0abb145f 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -93,7 +93,7 @@ describe('sequencer', () => { p2p.getTxs.mockResolvedValueOnce([tx]); blockBuilder.buildL2Block.mockResolvedValueOnce([block, proof]); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.publishL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables(chainId, version, new Fr(lastBlockNumber + 1), Fr.ZERO), ); @@ -108,7 +108,7 @@ describe('sequencer', () => { expectedTxHashes.map(hash => expect.objectContaining({ hash })), Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.processL2Block).toHaveBeenCalledWith(block); + expect(publisher.publishL2Block).toHaveBeenCalledWith(block); }); it('builds a block out of several txs rejecting double spends', async () => { @@ -122,7 +122,7 @@ describe('sequencer', () => { p2p.getTxs.mockResolvedValueOnce(txs); blockBuilder.buildL2Block.mockResolvedValueOnce([block, proof]); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.publishL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables(chainId, version, new Fr(lastBlockNumber + 1), Fr.ZERO), ); @@ -145,7 +145,7 @@ describe('sequencer', () => { expectedTxHashes.map(hash => expect.objectContaining({ hash })), Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.processL2Block).toHaveBeenCalledWith(block); + expect(publisher.publishL2Block).toHaveBeenCalledWith(block); expect(p2p.deleteTxs).toHaveBeenCalledWith([await 
doubleSpendTx.getTxHash()]); }); @@ -160,7 +160,7 @@ describe('sequencer', () => { p2p.getTxs.mockResolvedValueOnce(txs); blockBuilder.buildL2Block.mockResolvedValueOnce([block, proof]); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.publishL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables(chainId, version, new Fr(lastBlockNumber + 1), Fr.ZERO), ); @@ -178,7 +178,7 @@ describe('sequencer', () => { expectedTxHashes.map(hash => expect.objectContaining({ hash })), Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.processL2Block).toHaveBeenCalledWith(block); + expect(publisher.publishL2Block).toHaveBeenCalledWith(block); expect(p2p.deleteTxs).toHaveBeenCalledWith([await invalidChainTx.getTxHash()]); }); @@ -190,7 +190,7 @@ describe('sequencer', () => { p2p.getTxs.mockResolvedValueOnce([tx]); blockBuilder.buildL2Block.mockResolvedValueOnce([block, proof]); - publisher.processL2Block.mockResolvedValueOnce(true); + publisher.publishL2Block.mockResolvedValueOnce(true); globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce( new GlobalVariables(chainId, version, new Fr(lastBlockNumber + 1), Fr.ZERO), ); @@ -207,7 +207,7 @@ describe('sequencer', () => { await sequencer.work(); - expect(publisher.processL2Block).not.toHaveBeenCalled(); + expect(publisher.publishL2Block).not.toHaveBeenCalled(); }); }); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index bbb9a6af75ce..5324363e9f03 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -260,7 +260,7 @@ export class Sequencer { protected async publishL2Block(block: L2Block) { // Publishes new block to the network and awaits the tx to be mined this.state = SequencerState.PUBLISHING_BLOCK; - const publishedL2Block = await 
this.publisher.processL2Block(block); + const publishedL2Block = await this.publisher.publishL2Block(block); if (publishedL2Block) { this.log(`Successfully published block ${block.number}`); this.lastPublishedBlock = block.number; From 252013b01f3c728c6bc533ecb6a25f1d71922d00 Mon Sep 17 00:00:00 2001 From: benesjan Date: Fri, 12 Jan 2024 16:10:16 +0000 Subject: [PATCH 2/9] from2Fields func --- .../src/serialize/free_funcs.test.ts | 18 ++++++++++++++++++ .../foundation/src/serialize/free_funcs.ts | 19 +++++++++++++++++++ 2 files changed, 37 insertions(+) create mode 100644 yarn-project/foundation/src/serialize/free_funcs.test.ts diff --git a/yarn-project/foundation/src/serialize/free_funcs.test.ts b/yarn-project/foundation/src/serialize/free_funcs.test.ts new file mode 100644 index 000000000000..e3be26dd2d88 --- /dev/null +++ b/yarn-project/foundation/src/serialize/free_funcs.test.ts @@ -0,0 +1,18 @@ +import { randomBytes } from '../crypto/index.js'; +import { from2Fields, to2Fields } from './free_funcs.js'; + +describe('buffer to fields and back', () => { + it('should correctly serialize and deserialize a buffer', () => { + // Generate a random 32-byte buffer + const originalBuffer = randomBytes(32); + + // Serialize the buffer to two fields + const [field1, field2] = to2Fields(originalBuffer); + + // Deserialize the fields back to a buffer + const reconstructedBuffer = from2Fields(field1, field2); + + // Check if the original buffer and reconstructed buffer are identical + expect(reconstructedBuffer).toEqual(originalBuffer); + }); +}); diff --git a/yarn-project/foundation/src/serialize/free_funcs.ts b/yarn-project/foundation/src/serialize/free_funcs.ts index 6165657f0f90..a9f9dcdfc1ef 100644 --- a/yarn-project/foundation/src/serialize/free_funcs.ts +++ b/yarn-project/foundation/src/serialize/free_funcs.ts @@ -126,3 +126,22 @@ export function to2Fields(buf: Buffer): [Fr, Fr] { return [Fr.fromBuffer(buf1), Fr.fromBuffer(buf2)]; } + +/** + * Reconstructs the original 
32 bytes of data from 2 field elements. + * @param field1 - First field element + * @param field2 - Second field element + * @returns 32 bytes of data as a Buffer + */ +export function from2Fields(field1: Fr, field2: Fr): Buffer { + // Convert the field elements back to buffers + const buf1 = field1.toBuffer(); + const buf2 = field2.toBuffer(); + + // Remove the padding (first 16 bytes) from each buffer + const originalPart1 = buf1.slice(16, 32); + const originalPart2 = buf2.slice(16, 32); + + // Concatenate the two parts to form the original buffer + return Buffer.concat([originalPart1, originalPart2]); +} From da29ee50cb4c755aca07996b6b09915a5e366e52 Mon Sep 17 00:00:00 2001 From: benesjan Date: Sat, 13 Jan 2024 15:30:44 +0000 Subject: [PATCH 3/9] representing body hash as buffer in TS --- .../circuits.js/src/structs/header.test.ts | 12 ++++++++++++ yarn-project/circuits.js/src/structs/header.ts | 14 +++++++++----- .../src/structs/rollup/root_rollup.ts | 17 ----------------- yarn-project/circuits.js/src/tests/factories.ts | 6 ++++-- .../src/type_conversion.ts | 15 +++++++++++++-- .../src/block_builder/solo_block_builder.ts | 2 +- 6 files changed, 39 insertions(+), 27 deletions(-) create mode 100644 yarn-project/circuits.js/src/structs/header.test.ts diff --git a/yarn-project/circuits.js/src/structs/header.test.ts b/yarn-project/circuits.js/src/structs/header.test.ts new file mode 100644 index 000000000000..4cc4e60d8b5d --- /dev/null +++ b/yarn-project/circuits.js/src/structs/header.test.ts @@ -0,0 +1,12 @@ +import { makeHeader } from '../tests/factories.js'; +import { Header } from './header.js'; + +describe('Header', () => { + it(`serializes to buffer and deserializes it back`, () => { + const randomInt = Math.floor(Math.random() * 1000); + const expected = makeHeader(randomInt, undefined); + const buffer = expected.toBuffer(); + const res = Header.fromBuffer(buffer); + expect(res).toEqual(expected); + }); +}); diff --git 
a/yarn-project/circuits.js/src/structs/header.ts b/yarn-project/circuits.js/src/structs/header.ts index 9bcd9be63d6b..43dc5bfdab63 100644 --- a/yarn-project/circuits.js/src/structs/header.ts +++ b/yarn-project/circuits.js/src/structs/header.ts @@ -1,23 +1,27 @@ -import { Fr } from '@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; -import { NUM_FIELDS_PER_SHA256 } from '../constants.gen.js'; import { GlobalVariables } from './global_variables.js'; import { AppendOnlyTreeSnapshot } from './rollup/append_only_tree_snapshot.js'; import { StateReference } from './state_reference.js'; +export const NUM_BYTES_PER_SHA256 = 32; + /** A header of an L2 block. */ export class Header { constructor( /** Snapshot of archive before the block is applied. */ public lastArchive: AppendOnlyTreeSnapshot, /** Hash of the body of an L2 block. */ - public bodyHash: [Fr, Fr], + public bodyHash: Buffer, /** State reference. */ public state: StateReference, /** Global variables of an L2 block. */ public globalVariables: GlobalVariables, - ) {} + ) { + if (bodyHash.length !== 32) { + throw new Error('Body hash buffer must be 32 bytes'); + } + } toBuffer() { // Note: The order here must match the order in the HeaderDecoder solidity library. 
@@ -28,7 +32,7 @@ export class Header { const reader = BufferReader.asReader(buffer); return new Header( reader.readObject(AppendOnlyTreeSnapshot), - reader.readArray(NUM_FIELDS_PER_SHA256, Fr) as [Fr, Fr], + reader.readBytes(NUM_BYTES_PER_SHA256), reader.readObject(StateReference), reader.readObject(GlobalVariables), ); diff --git a/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts b/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts index 5a4b34df659d..2a839e6eda6e 100644 --- a/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts +++ b/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts @@ -95,23 +95,6 @@ export class RootRollupPublicInputs { return new RootRollupPublicInputs(...RootRollupPublicInputs.getFields(fields)); } - /** - * Returns the sha256 hash of the calldata. - * @returns The sha256 hash of the calldata. - */ - public sha256CalldataHash(): Buffer { - const high = this.header.bodyHash[0].toBuffer(); - const low = this.header.bodyHash[1].toBuffer(); - - const hash = Buffer.alloc(32); - for (let i = 0; i < 16; i++) { - hash[i] = high[i + 16]; - hash[i + 16] = low[i + 16]; - } - - return hash; - } - /** * Deserializes a buffer into a `RootRollupPublicInputs` object. * @param buffer - The buffer to deserialize. 
diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index 93439eea2bf1..6586d23ceb18 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -3,6 +3,8 @@ import { AztecAddress } from '@aztec/foundation/aztec-address'; import { EthAddress } from '@aztec/foundation/eth-address'; import { numToUInt32BE } from '@aztec/foundation/serialize'; +import { randomBytes } from 'crypto'; + import { SchnorrSignature } from '../barretenberg/index.js'; import { ARCHIVE_HEIGHT, @@ -101,7 +103,7 @@ import { WitnessedPublicCallData, } from '../index.js'; import { GlobalVariables } from '../structs/global_variables.js'; -import { Header } from '../structs/header.js'; +import { Header, NUM_BYTES_PER_SHA256 } from '../structs/header.js'; /** * Creates an arbitrary side effect object with the given seed. @@ -882,7 +884,7 @@ export function makeRootRollupPublicInputs( export function makeHeader(seed = 0, globalVariables: GlobalVariables | undefined): Header { return new Header( makeAppendOnlyTreeSnapshot(seed + 0x100), - [new Fr(5n), new Fr(6n)], + randomBytes(NUM_BYTES_PER_SHA256), makeStateReference(seed + 0x200), globalVariables ?? 
makeGlobalVariables((seed += 0x100)), ); diff --git a/yarn-project/noir-protocol-circuits/src/type_conversion.ts b/yarn-project/noir-protocol-circuits/src/type_conversion.ts index 260d5946f8e2..9b598111b4ae 100644 --- a/yarn-project/noir-protocol-circuits/src/type_conversion.ts +++ b/yarn-project/noir-protocol-circuits/src/type_conversion.ts @@ -72,7 +72,7 @@ import { TxContext, TxRequest, } from '@aztec/circuits.js'; -import { Tuple, mapTuple } from '@aztec/foundation/serialize'; +import { Tuple, from2Fields, mapTuple } from '@aztec/foundation/serialize'; import { BlockHeader as BlockHeaderNoir, @@ -137,6 +137,8 @@ import { AppendOnlyTreeSnapshot as AppendOnlyTreeSnapshotNoir, BaseOrMergeRollupPublicInputs as BaseOrMergeRollupPublicInputsNoir, ConstantRollupData as ConstantRollupDataNoir, + Field, + FixedLengthArray, GlobalVariables as GlobalVariablesNoir, Header as HeaderNoir, PartialStateReference as PartialStateReferenceNoir, @@ -678,6 +680,15 @@ export function mapTupleFromNoir( return Array.from({ length }, (_, idx) => mapper(noirArray[idx])) as Tuple; } +/** + * Maps a SHA256 hash from noir to the parsed type. + * @param hash - The hash as it is represented in Noir (2 fields). + * @returns The hash represented as a 32 bytes long buffer. + */ +export function mapSha256HashFromNoir(hash: FixedLengthArray): Buffer { + return from2Fields(mapFieldFromNoir(hash[0]), mapFieldFromNoir(hash[1])); +} + /** * Maps optionally revealed data from noir to the parsed type. * @param optionallyRevealedData - The noir optionally revealed data. 
@@ -1318,7 +1329,7 @@ export function mapRootRollupPublicInputsFromNoir( export function mapHeaderFromNoir(header: HeaderNoir): Header { return new Header( mapAppendOnlyTreeSnapshotFromNoir(header.last_archive), - mapTupleFromNoir(header.body_hash, 2, mapFieldFromNoir), + mapSha256HashFromNoir(header.body_hash), mapStateReferenceFromNoir(header.state), mapGlobalVariablesFromNoir(header.global_variables), ); diff --git a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts index d8958a6d4091..ee8d709fee08 100644 --- a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts +++ b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts @@ -144,7 +144,7 @@ export class SoloBlockBuilder implements BlockBuilder { newUnencryptedLogs, }); - if (!l2Block.getCalldataHash().equals(circuitsOutput.sha256CalldataHash())) { + if (!l2Block.getCalldataHash().equals(circuitsOutput.header.bodyHash)) { throw new Error( `Calldata hash mismatch, ${l2Block.getCalldataHash().toString('hex')} == ${circuitsOutput .sha256CalldataHash() From 3651219d2d69cae4d45b622e49fa524df92d1254 Mon Sep 17 00:00:00 2001 From: benesjan Date: Sat, 13 Jan 2024 17:09:20 +0000 Subject: [PATCH 4/9] cleanup --- .../noir-protocol-circuits/src/crates/rollup-lib/src/root.nr | 5 ----- 1 file changed, 5 deletions(-) diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr index 0fff41cc8fee..e44769c9719c 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr @@ -65,11 +65,6 @@ impl RootRollupInputs { 0 ); - let zeroed_out_snapshot = AppendOnlyTreeSnapshot { - root : 0, - next_available_leaf_index : 0 - }; - let header = Header { last_archive: left.constants.last_archive, body_hash: 
components::compute_calldata_hash(self.previous_rollup_data), From e628abe9ab4c956df8c742a80e25e8e79ae414b7 Mon Sep 17 00:00:00 2001 From: benesjan Date: Sat, 13 Jan 2024 17:10:50 +0000 Subject: [PATCH 5/9] WIP --- l1-contracts/src/core/Rollup.sol | 52 ++++++++----------- l1-contracts/src/core/interfaces/IRollup.sol | 9 +++- l1-contracts/src/core/libraries/Errors.sol | 2 +- .../core/libraries/decoders/HeaderDecoder.sol | 37 ++++++++----- 4 files changed, 54 insertions(+), 46 deletions(-) diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index 71deebe305e4..bf11afbaa743 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -30,7 +30,7 @@ contract Rollup is IRollup { uint256 public immutable VERSION; AvailabilityOracle public immutable AVAILABILITY_ORACLE; - bytes32 public rollupStateHash; + bytes32 public archive; // Root of the archive tree uint256 public lastBlockTs; // Tracks the last time time was warped on L2 ("warp" is the testing cheatcode). // See https://github.com/AztecProtocol/aztec-packages/issues/1614 @@ -47,29 +47,26 @@ contract Rollup is IRollup { * @notice Process an incoming L2 block and progress the state * @param _header - The L2 block header. * @param _archive - A snapshot (root and next available leaf index) of the archive tree after the L2 block is applied - * @param _l1_to_l2_msgs - The L1 to L2 messages processed in this block - * @param _l2_to_l1_msgs - The L2 to L1 messages processed in this block + * @param _body - The L2 block body. * @param _proof - The proof of correct execution. */ function process( bytes calldata _header, bytes calldata _archive, - bytes calldata _l1_to_l2_msgs, - bytes calldata _l2_to_l1_msgs, + bytes calldata _body, // Note: this will be replaced with _txsHash once the separation is finished. bytes memory _proof ) external override(IRollup) { - _validateHeader(_header); + // TODO: @benejsan Should we represent this values from header as a nice struct? 
+ HeaderDecoder.Header memory header = HeaderDecoder.decode(_header); - // Decode the header - (uint256 l2BlockNumber, bytes32 oldStateHash, bytes32 newStateHash) = - HeaderDecoder.decode(_l2Block[:HeaderDecoder.BLOCK_HEADER_SIZE]); + _validateHeader(header); // Check if the data is available using availability oracle (change availability oracle if you want a different DA layer) bytes32 txsHash; { // @todo @LHerskind Hack such that the node is unchanged for now. // should be removed when we have a proper block publication. - txsHash = AVAILABILITY_ORACLE.publish(_l2Block[HeaderDecoder.BLOCK_HEADER_SIZE:]); + txsHash = AVAILABILITY_ORACLE.publish(_body); } if (!AVAILABILITY_ORACLE.isAvailable(txsHash)) { @@ -79,10 +76,7 @@ contract Rollup is IRollup { // Decode the cross-chain messages (bytes32 inHash,, bytes32[] memory l1ToL2Msgs, bytes32[] memory l2ToL1Msgs) = - MessagesDecoder.decode(_l2Block[HeaderDecoder.BLOCK_HEADER_SIZE:]); - - bytes32 publicInputHash = - _computePublicInputHash(_l2Block[:HeaderDecoder.BLOCK_HEADER_SIZE], txsHash, inHash); + MessagesDecoder.decode(_body); // @todo @LHerskind Proper genesis state. If the state is empty, we allow anything for now. // TODO(#3936): Temporarily disabling this because L2Block encoding has not yet been updated. @@ -91,13 +85,14 @@ contract Rollup is IRollup { // } bytes32[] memory publicInputs = new bytes32[](1); - publicInputs[0] = publicInputHash; + publicInputs[0] = _computePublicInputHash(_header, txsHash, inHash); if (!VERIFIER.verify(_proof, publicInputs)) { revert Errors.Rollup__InvalidProof(); } - rollupStateHash = newStateHash; + // TODO: @benejsan Manually extracting the root here is ugly. TODO: Re-think how to represent archive snap. 
+ archive = bytes32(_header[:0x20]); lastBlockTs = block.timestamp; // @todo (issue #605) handle fee collector @@ -107,24 +102,19 @@ contract Rollup is IRollup { IOutbox outbox = REGISTRY.getOutbox(); outbox.sendL1Messages(l2ToL1Msgs); - emit L2BlockProcessed(l2BlockNumber); + emit L2BlockProcessed(header.blockNumber); } - function _validateHeader(bytes calldata _header) internal view { - uint256 chainId = uint256(bytes32(_header[:0x20])); - uint256 version = uint256(bytes32(_header[0x20:0x40])); - uint256 ts = uint256(bytes32(_header[0x60:0x80])); - // block number already constrained by start state hash - - if (block.chainid != chainId) { - revert Errors.Rollup__InvalidChainId(chainId, block.chainid); + function _validateHeader(HeaderDecoder.Header memory header) internal view { + if (block.chainid != header.chainId) { + revert Errors.Rollup__InvalidChainId(header.chainId, block.chainid); } - if (version != VERSION) { - revert Errors.Rollup__InvalidVersion(version, VERSION); + if (header.version != VERSION) { + revert Errors.Rollup__InvalidVersion(header.version, VERSION); } - if (ts > block.timestamp) { + if (header.timestamp > block.timestamp) { revert Errors.Rollup__TimestampInFuture(); } @@ -132,9 +122,13 @@ contract Rollup is IRollup { // This will make multiple l2 blocks in the same l1 block impractical. // e.g., the first block will update timestamp which will make the second fail. 
// Could possibly allow multiple blocks if in same l1 block - if (ts < lastBlockTs) { + if (header.timestamp < lastBlockTs) { revert Errors.Rollup__TimestampTooOld(); } + + if (archive != header.lastArchive) { + revert Errors.Rollup__InvalidArchive(archive, header.lastArchive); + } } function _computePublicInputHash(bytes calldata _header, bytes32 _txsHash, bytes32 _inHash) diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index ab2fca20e181..264625275ff1 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -3,7 +3,12 @@ pragma solidity >=0.8.18; interface IRollup { - event L2BlockProcessed(uint256 indexed blockNum); + event L2BlockProcessed(uint256 indexed blockNumber); - function process(bytes memory _proof, bytes calldata _l2Block) external; + function process( + bytes calldata _header, + bytes calldata _archive, + bytes calldata _body, + bytes memory _proof + ) external; } diff --git a/l1-contracts/src/core/libraries/Errors.sol b/l1-contracts/src/core/libraries/Errors.sol index e8d6785f1833..39ec6a692bc2 100644 --- a/l1-contracts/src/core/libraries/Errors.sol +++ b/l1-contracts/src/core/libraries/Errors.sol @@ -48,7 +48,7 @@ library Errors { ); // 0x5e789f34 // Rollup - error Rollup__InvalidStateHash(bytes32 expected, bytes32 actual); // 0xa3cfaab3 + error Rollup__InvalidArchive(bytes32 expected, bytes32 actual); // 0xb682a40e error Rollup__InvalidProof(); // 0xa5b2ba17 error Rollup__InvalidChainId(uint256 expected, uint256 actual); // 0x37b5bc12 error Rollup__InvalidVersion(uint256 expected, uint256 actual); // 0x9ef30794 diff --git a/l1-contracts/src/core/libraries/decoders/HeaderDecoder.sol b/l1-contracts/src/core/libraries/decoders/HeaderDecoder.sol index d902a257577e..a016eb0ab0af 100644 --- a/l1-contracts/src/core/libraries/decoders/HeaderDecoder.sol +++ b/l1-contracts/src/core/libraries/decoders/HeaderDecoder.sol @@ -48,6 +48,15 @@ import 
{Hash} from "../Hash.sol"; * | --- | --- | --- */ library HeaderDecoder { + // TODO: This is only partial + struct Header { + uint256 chainId; + uint256 version; + uint256 blockNumber; + uint256 timestamp; + bytes32 lastArchive; + } + // DECODING OFFSET CONSTANTS // Where the start of trees metadata begins in the block uint256 private constant START_TREES_BLOCK_HEADER_OFFSET = 0x80; @@ -65,21 +74,21 @@ library HeaderDecoder { /** * @notice Decodes the header - * @param _header - The L2 block calldata. - * @return l2BlockNumber - The L2 block number - * @return startStateHash - The start state hash - * @return endStateHash - The end state hash + * @param _header - The header calldata. */ - function decode(bytes calldata _header) - internal - pure - returns (uint256 l2BlockNumber, bytes32 startStateHash, bytes32 endStateHash) - { - l2BlockNumber = uint256(bytes32(_header[0x40:0x60])); - // Note, for startStateHash to match the storage, the l2 block number must be new - 1. - // Only jumping 1 block at a time. - startStateHash = computeStateHash(l2BlockNumber - 1, START_TREES_BLOCK_HEADER_OFFSET, _header); - endStateHash = computeStateHash(l2BlockNumber, END_TREES_BLOCK_HEADER_OFFSET, _header); + function decode(bytes calldata _header) internal pure returns (Header memory) { + Header memory header; + + header.chainId = uint256(bytes32(_header[:0x20])); + header.version = uint256(bytes32(_header[0x20:0x40])); + header.blockNumber = uint256(bytes32(_header[0x40:0x60])); + header.timestamp = uint256(bytes32(_header[0x60:0x80])); + + // The rest is needed only by verifier and hence not decoded here. 
+ + header.lastArchive = bytes32(_header[0x134:0x154]); + + return header; } /** From 757dc29dfac82f89ec30d3d071d39073b1ca1d37 Mon Sep 17 00:00:00 2001 From: benesjan Date: Mon, 15 Jan 2024 09:30:54 +0000 Subject: [PATCH 6/9] temporarily disabling contract tests --- l1-contracts/test/Rollup.t.sol | 308 ++++++------- l1-contracts/test/decoders/Decoder.t.sol | 532 ++++++++++++----------- 2 files changed, 423 insertions(+), 417 deletions(-) diff --git a/l1-contracts/test/Rollup.t.sol b/l1-contracts/test/Rollup.t.sol index 97c05292291a..7caf9cdc04b5 100644 --- a/l1-contracts/test/Rollup.t.sol +++ b/l1-contracts/test/Rollup.t.sol @@ -1,160 +1,160 @@ -// SPDX-License-Identifier: Apache-2.0 -// Copyright 2023 Aztec Labs. -pragma solidity >=0.8.18; +// // SPDX-License-Identifier: Apache-2.0 +// // Copyright 2023 Aztec Labs. +// pragma solidity >=0.8.18; -import {Test} from "forge-std/Test.sol"; +// import {Test} from "forge-std/Test.sol"; -import {DecoderTest} from "./decoders/Decoder.t.sol"; -import {DecoderHelper} from "./DecoderHelper.sol"; +// import {DecoderTest} from "./decoders/Decoder.t.sol"; +// import {DecoderHelper} from "./DecoderHelper.sol"; -import {DecoderBase} from "./decoders/Base.sol"; +// import {DecoderBase} from "./decoders/Base.sol"; -import {DataStructures} from "../src/core/libraries/DataStructures.sol"; +// import {DataStructures} from "../src/core/libraries/DataStructures.sol"; -import {Registry} from "../src/core/messagebridge/Registry.sol"; -import {Inbox} from "../src/core/messagebridge/Inbox.sol"; -import {Outbox} from "../src/core/messagebridge/Outbox.sol"; -import {Errors} from "../src/core/libraries/Errors.sol"; -import {Rollup} from "../src/core/Rollup.sol"; +// import {Registry} from "../src/core/messagebridge/Registry.sol"; +// import {Inbox} from "../src/core/messagebridge/Inbox.sol"; +// import {Outbox} from "../src/core/messagebridge/Outbox.sol"; +// import {Errors} from "../src/core/libraries/Errors.sol"; +// import {Rollup} from 
"../src/core/Rollup.sol"; -/** - * Blocks are generated using the `integration_l1_publisher.test.ts` tests. - * Main use of these test is shorter cycles when updating the decoder contract. - */ -contract RollupTest is DecoderBase { - DecoderHelper internal helper; - Registry internal registry; - Inbox internal inbox; - Outbox internal outbox; - Rollup internal rollup; - - function setUp() public virtual { - helper = new DecoderHelper(); - - registry = new Registry(); - inbox = new Inbox(address(registry)); - outbox = new Outbox(address(registry)); - rollup = new Rollup(registry); - - registry.upgrade(address(rollup), address(inbox), address(outbox)); - } - - function testMixedBlock() public { - _testBlock("mixed_block_0"); - } - - function testConsecutiveMixedBlocks() public { - _testBlock("mixed_block_0"); - _testBlock("mixed_block_1"); - } - - function testEmptyBlock() public { - _testBlock("empty_block_0"); - } - - function testConsecutiveEmptyBlocks() public { - _testBlock("empty_block_0"); - _testBlock("empty_block_1"); - } - - function testRevertInvalidChainId() public { - bytes memory block_ = load("empty_block_0").block.body; - - assembly { - mstore(add(block_, 0x20), 0x420) - } - - vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidChainId.selector, 0x420, 31337)); - rollup.process(bytes(""), block_); - } - - function testRevertInvalidVersion() public { - bytes memory block_ = load("empty_block_0").block.body; - - assembly { - mstore(add(block_, 0x40), 0x420) - } - - vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidVersion.selector, 0x420, 1)); - rollup.process(bytes(""), block_); - } - - function testRevertTimestampInFuture() public { - bytes memory block_ = load("empty_block_0").block.body; - - uint256 ts = block.timestamp + 1; - assembly { - mstore(add(block_, 0x80), ts) - } - - vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__TimestampInFuture.selector)); - rollup.process(bytes(""), block_); - } - - function 
testRevertTimestampTooOld() public { - bytes memory block_ = load("empty_block_0").block.body; - - // Overwrite in the rollup contract - vm.store(address(rollup), bytes32(uint256(1)), bytes32(uint256(block.timestamp))); - - vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__TimestampTooOld.selector)); - rollup.process(bytes(""), block_); - } - - function _testBlock(string memory name) public { - DecoderBase.Full memory full = load(name); - // We jump to the time of the block. - vm.warp(full.block.timestamp); - - _populateInbox(full.populate.sender, full.populate.recipient, full.populate.l1ToL2Content); - - for (uint256 i = 0; i < full.messages.l1ToL2Messages.length; i++) { - if (full.messages.l1ToL2Messages[i] == bytes32(0)) { - continue; - } - assertTrue(inbox.contains(full.messages.l1ToL2Messages[i]), "msg not in inbox"); - } - - vm.record(); - rollup.process(bytes(""), full.block.body); - - (, bytes32[] memory inboxWrites) = vm.accesses(address(inbox)); - (, bytes32[] memory outboxWrites) = vm.accesses(address(outbox)); - - { - uint256 count = 0; - for (uint256 i = 0; i < full.messages.l2ToL1Messages.length; i++) { - if (full.messages.l2ToL1Messages[i] == bytes32(0)) { - continue; - } - assertTrue(outbox.contains(full.messages.l2ToL1Messages[i]), "msg not in outbox"); - count++; - } - assertEq(outboxWrites.length, count, "Invalid outbox writes"); - } - - { - uint256 count = 0; - for (uint256 i = 0; i < full.messages.l1ToL2Messages.length; i++) { - if (full.messages.l1ToL2Messages[i] == bytes32(0)) { - continue; - } - assertFalse(inbox.contains(full.messages.l1ToL2Messages[i]), "msg not consumed"); - count++; - } - assertEq(inboxWrites.length, count, "Invalid inbox writes"); - } - - assertEq(rollup.rollupStateHash(), full.block.endStateHash, "Invalid rollup state hash"); - } - - function _populateInbox(address _sender, bytes32 _recipient, bytes32[] memory _contents) internal { - uint32 deadline = type(uint32).max; - for (uint256 i = 0; i < _contents.length; 
i++) { - vm.prank(_sender); - inbox.sendL2Message( - DataStructures.L2Actor({actor: _recipient, version: 1}), deadline, _contents[i], bytes32(0) - ); - } - } -} +// /** +// * Blocks are generated using the `integration_l1_publisher.test.ts` tests. +// * Main use of these test is shorter cycles when updating the decoder contract. +// */ +// contract RollupTest is DecoderBase { +// DecoderHelper internal helper; +// Registry internal registry; +// Inbox internal inbox; +// Outbox internal outbox; +// Rollup internal rollup; + +// function setUp() public virtual { +// helper = new DecoderHelper(); + +// registry = new Registry(); +// inbox = new Inbox(address(registry)); +// outbox = new Outbox(address(registry)); +// rollup = new Rollup(registry); + +// registry.upgrade(address(rollup), address(inbox), address(outbox)); +// } + +// function testMixedBlock() public { +// _testBlock("mixed_block_0"); +// } + +// function testConsecutiveMixedBlocks() public { +// _testBlock("mixed_block_0"); +// _testBlock("mixed_block_1"); +// } + +// function testEmptyBlock() public { +// _testBlock("empty_block_0"); +// } + +// function testConsecutiveEmptyBlocks() public { +// _testBlock("empty_block_0"); +// _testBlock("empty_block_1"); +// } + +// function testRevertInvalidChainId() public { +// bytes memory block_ = load("empty_block_0").block.body; + +// assembly { +// mstore(add(block_, 0x20), 0x420) +// } + +// vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidChainId.selector, 0x420, 31337)); +// rollup.process(bytes(""), block_); +// } + +// function testRevertInvalidVersion() public { +// bytes memory block_ = load("empty_block_0").block.body; + +// assembly { +// mstore(add(block_, 0x40), 0x420) +// } + +// vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidVersion.selector, 0x420, 1)); +// rollup.process(bytes(""), block_); +// } + +// function testRevertTimestampInFuture() public { +// bytes memory block_ = load("empty_block_0").block.body; + +// 
uint256 ts = block.timestamp + 1; +// assembly { +// mstore(add(block_, 0x80), ts) +// } + +// vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__TimestampInFuture.selector)); +// rollup.process(bytes(""), block_); +// } + +// function testRevertTimestampTooOld() public { +// bytes memory block_ = load("empty_block_0").block.body; + +// // Overwrite in the rollup contract +// vm.store(address(rollup), bytes32(uint256(1)), bytes32(uint256(block.timestamp))); + +// vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__TimestampTooOld.selector)); +// rollup.process(bytes(""), block_); +// } + +// function _testBlock(string memory name) public { +// DecoderBase.Full memory full = load(name); +// // We jump to the time of the block. +// vm.warp(full.block.timestamp); + +// _populateInbox(full.populate.sender, full.populate.recipient, full.populate.l1ToL2Content); + +// for (uint256 i = 0; i < full.messages.l1ToL2Messages.length; i++) { +// if (full.messages.l1ToL2Messages[i] == bytes32(0)) { +// continue; +// } +// assertTrue(inbox.contains(full.messages.l1ToL2Messages[i]), "msg not in inbox"); +// } + +// vm.record(); +// rollup.process(bytes(""), full.block.body); + +// (, bytes32[] memory inboxWrites) = vm.accesses(address(inbox)); +// (, bytes32[] memory outboxWrites) = vm.accesses(address(outbox)); + +// { +// uint256 count = 0; +// for (uint256 i = 0; i < full.messages.l2ToL1Messages.length; i++) { +// if (full.messages.l2ToL1Messages[i] == bytes32(0)) { +// continue; +// } +// assertTrue(outbox.contains(full.messages.l2ToL1Messages[i]), "msg not in outbox"); +// count++; +// } +// assertEq(outboxWrites.length, count, "Invalid outbox writes"); +// } + +// { +// uint256 count = 0; +// for (uint256 i = 0; i < full.messages.l1ToL2Messages.length; i++) { +// if (full.messages.l1ToL2Messages[i] == bytes32(0)) { +// continue; +// } +// assertFalse(inbox.contains(full.messages.l1ToL2Messages[i]), "msg not consumed"); +// count++; +// } +// assertEq(inboxWrites.length, 
count, "Invalid inbox writes"); +// } + +// assertEq(rollup.rollupStateHash(), full.block.endStateHash, "Invalid rollup state hash"); +// } + +// function _populateInbox(address _sender, bytes32 _recipient, bytes32[] memory _contents) internal { +// uint32 deadline = type(uint32).max; +// for (uint256 i = 0; i < _contents.length; i++) { +// vm.prank(_sender); +// inbox.sendL2Message( +// DataStructures.L2Actor({actor: _recipient, version: 1}), deadline, _contents[i], bytes32(0) +// ); +// } +// } +// } diff --git a/l1-contracts/test/decoders/Decoder.t.sol b/l1-contracts/test/decoders/Decoder.t.sol index c846f12a29a1..ae01d8616aad 100644 --- a/l1-contracts/test/decoders/Decoder.t.sol +++ b/l1-contracts/test/decoders/Decoder.t.sol @@ -1,263 +1,269 @@ -// SPDX-License-Identifier: Apache-2.0 -// Copyright 2023 Aztec Labs. -pragma solidity >=0.8.18; - -import {DecoderBase} from "./Base.sol"; - -import {Hash} from "../../src/core/libraries/Hash.sol"; -import {DataStructures} from "../../src/core/libraries/DataStructures.sol"; -import {DecoderHelper} from "../DecoderHelper.sol"; - -import {Decoder} from "../../src/core/libraries/decoders/Decoder.sol"; -import {HeaderDecoder} from "../../src/core/libraries/decoders/HeaderDecoder.sol"; -import {MessagesDecoder} from "../../src/core/libraries/decoders/MessagesDecoder.sol"; -import {TxsDecoder} from "../../src/core/libraries/decoders/TxsDecoder.sol"; - -import {AvailabilityOracle} from "../../src/core/availability_oracle/AvailabilityOracle.sol"; - -contract HeaderDecoderHelper { - // A wrapper used such that we get "calldata" and not memory - function decode(bytes calldata _header) - public - pure - returns (uint256 l2BlockNumber, bytes32 startStateHash, bytes32 endStateHash) - { - return HeaderDecoder.decode(_header); - } -} - -contract MessagesDecoderHelper { - // A wrapper used such that we get "calldata" and not memory - function decode(bytes calldata _header) - public - pure - returns ( - bytes32 l1ToL2MsgsHash, - 
bytes32 l2ToL1MsgsHash, - bytes32[] memory l1ToL2Msgs, - bytes32[] memory l2ToL1Msgs - ) - { - return MessagesDecoder.decode(_header[HeaderDecoder.BLOCK_HEADER_SIZE:]); - } -} - -contract TxsDecoderHelper { - // A wrapper used such that we get "calldata" and not memory - function decode(bytes calldata _header) public pure returns (bytes32 txsHash) { - return TxsDecoder.decode(_header[HeaderDecoder.BLOCK_HEADER_SIZE:]); - } -} - -/** - * Blocks are generated using the `integration_l1_publisher.test.ts` tests. - * Main use of these test is shorter cycles when updating the decoder contract. - * All tests here are skipped (all tests are prefixed with an underscore)! - * This is because we implicitly test the decoding in integration_l1_publisher.test.ts - */ -contract DecoderTest is DecoderBase { - DecoderHelper internal helper; - HeaderDecoderHelper internal headerHelper; - MessagesDecoderHelper internal messagesHelper; - TxsDecoderHelper internal txsHelper; - - function setUp() public virtual { - helper = new DecoderHelper(); - headerHelper = new HeaderDecoderHelper(); - messagesHelper = new MessagesDecoderHelper(); - txsHelper = new TxsDecoderHelper(); - } - - function testDecodeBlocks() public { - _testDecodeBlock("mixed_block_0"); - _testDecodeBlock("mixed_block_1"); - _testDecodeBlock("empty_block_0"); - _testDecodeBlock("empty_block_1"); - } - - function _testDecodeBlock(string memory name) public virtual { - DecoderBase.Full memory data = load(name); - - // Using the FULL decoder. 
- ( - uint256 l2BlockNumber, - bytes32 startStateHash, - bytes32 endStateHash, - bytes32 publicInputsHash, - bytes32[] memory l2ToL1Msgs, - bytes32[] memory l1ToL2Msgs - ) = helper.decode(data.block.body); - (bytes32 diffRoot, bytes32 l1ToL2MessagesHash) = - helper.computeDiffRootAndMessagesHash(data.block.body); - - // Header - { - (uint256 headerL2BlockNumber, bytes32 headerStartStateHash, bytes32 headerEndStateHash) = - headerHelper.decode(data.block.body); - - assertEq(l2BlockNumber, data.block.blockNumber, "Invalid block number"); - assertEq(headerL2BlockNumber, data.block.blockNumber, "Invalid block number"); - assertEq(startStateHash, data.block.startStateHash, "Invalid start state hash"); - assertEq(headerStartStateHash, data.block.startStateHash, "Invalid start state hash"); - assertEq(endStateHash, data.block.endStateHash, "Invalid end state hash"); - assertEq(headerEndStateHash, data.block.endStateHash, "Invalid end state hash"); - } - - // Messages - { - ( - bytes32 msgsInHash, - bytes32 msgsL2ToL1MsgsHash, - bytes32[] memory msgsL1ToL2Msgs, - bytes32[] memory msgsL2ToL1Msgs - ) = messagesHelper.decode(data.block.body); - - assertEq(msgsInHash, data.block.l1ToL2MessagesHash, "Invalid l1ToL2MsgsHash msgs"); - assertEq(l1ToL2MessagesHash, data.block.l1ToL2MessagesHash, "Invalid l1ToL2MsgsHash full"); - - // assertEq(msgsL2ToL1MsgsHash, b.l2ToL1MessagesHash, "Invalid l2ToL1MsgsHash"); - - // L1 -> L2 messages - assertEq( - msgsL1ToL2Msgs.length, data.messages.l1ToL2Messages.length, "Invalid l1ToL2Msgs length" - ); - assertEq(l1ToL2Msgs.length, data.messages.l1ToL2Messages.length, "Invalid l1ToL2Msgs length"); - for (uint256 i = 0; i < msgsL1ToL2Msgs.length; i++) { - assertEq(msgsL1ToL2Msgs[i], data.messages.l1ToL2Messages[i], "Invalid l1ToL2Msgs messages"); - assertEq(l1ToL2Msgs[i], data.messages.l1ToL2Messages[i], "Invalid l1ToL2Msgs full"); - } - - // L2 -> L1 messages - assertEq( - msgsL2ToL1Msgs.length, data.messages.l2ToL1Messages.length, "Invalid 
l2ToL1Msgs length" - ); - assertEq(l2ToL1Msgs.length, data.messages.l2ToL1Messages.length, "Invalid l2ToL1Msgs length"); - for (uint256 i = 0; i < msgsL2ToL1Msgs.length; i++) { - assertEq(msgsL2ToL1Msgs[i], data.messages.l2ToL1Messages[i], "Invalid l2ToL1Msgs messages"); - assertEq(l2ToL1Msgs[i], data.messages.l2ToL1Messages[i], "Invalid l2ToL1Msgs full"); - } - } - - // Txs - { - bytes32 txsHash = txsHelper.decode(data.block.body); - assertEq(txsHash, data.block.calldataHash, "Invalid txs hash"); - assertEq(diffRoot, data.block.calldataHash, "Invalid diff root/calldata hash"); - } - - // The public inputs are computed based of these values, but not directly part of the decoding per say. - } - - function testComputeKernelLogsIterationWithoutLogs() public { - bytes memory kernelLogsLength = hex"00000004"; // 4 bytes containing value 4 - bytes memory iterationLogsLength = hex"00000000"; // 4 empty bytes indicating that length of this iteration's logs is 0 - bytes memory encodedLogs = abi.encodePacked(kernelLogsLength, iterationLogsLength); - - (bytes32 logsHash, uint256 bytesAdvanced) = helper.computeKernelLogsHash(encodedLogs); - - bytes32 kernelPublicInputsLogsHash = bytes32(0); - bytes32 privateCircuitPublicInputsLogsHash = sha256(new bytes(0)); - - bytes32 referenceLogsHash = - sha256(abi.encodePacked(kernelPublicInputsLogsHash, privateCircuitPublicInputsLogsHash)); - - assertEq(bytesAdvanced, encodedLogs.length, "Advanced by an incorrect number of bytes"); - assertEq(logsHash, referenceLogsHash, "Incorrect logs hash"); - } - - function testComputeKernelLogs1Iteration() public { - // || K_LOGS_LEN | I1_LOGS_LEN | I1_LOGS || - // K_LOGS_LEN = 4 + 8 = 12 (hex"0000000c") - // I1_LOGS_LEN = 8 (hex"00000008") - // I1_LOGS = 8 bytes (hex"0000000493e78a70") // Note: 00000004 is the length of 1 log within function logs - bytes memory firstFunctionCallLogs = hex"0000000493e78a70"; - // Prefix logs with length of kernel logs (12) and length of iteration 1 logs (8) - bytes 
memory encodedLogs = abi.encodePacked(hex"0000000c00000008", firstFunctionCallLogs); - (bytes32 logsHash, uint256 bytesAdvanced) = helper.computeKernelLogsHash(encodedLogs); - - // Zero because this is the first iteration - bytes32 previousKernelPublicInputsLogsHash = bytes32(0); - bytes32 privateCircuitPublicInputsLogsHashFirstCall = sha256(firstFunctionCallLogs); - - bytes32 referenceLogsHash = sha256( - abi.encodePacked( - previousKernelPublicInputsLogsHash, privateCircuitPublicInputsLogsHashFirstCall - ) - ); - - assertEq(bytesAdvanced, encodedLogs.length, "Advanced by an incorrect number of bytes"); - assertEq(logsHash, referenceLogsHash, "Incorrect logs hash"); - } - - function testComputeKernelLogs2Iterations() public { - // || K_LOGS_LEN | I1_LOGS_LEN | I1_LOGS | I2_LOGS_LEN | I2_LOGS || - // K_LOGS_LEN = 4 + 8 + 4 + 20 = 36 (hex"00000024") - // I1_LOGS_LEN = 8 (hex"00000008") - // I1_LOGS = 8 random bytes (hex"0000000493e78a70") - // I2_LOGS_LEN = 20 (hex"00000014") - // I2_LOGS = 20 bytes (hex"0000001006a86173c86c6d3f108eefc36e7fb014") - bytes memory firstFunctionCallLogs = hex"0000000493e78a70"; - bytes memory secondFunctionCallLogs = hex"0000001006a86173c86c6d3f108eefc36e7fb014"; - bytes memory encodedLogs = abi.encodePacked( - hex"0000002400000008", firstFunctionCallLogs, hex"00000014", secondFunctionCallLogs - ); - (bytes32 logsHash, uint256 bytesAdvanced) = helper.computeKernelLogsHash(encodedLogs); - - bytes32 referenceLogsHashFromIteration1 = - sha256(abi.encodePacked(bytes32(0), sha256(firstFunctionCallLogs))); - - bytes32 privateCircuitPublicInputsLogsHashSecondCall = sha256(secondFunctionCallLogs); - - bytes32 referenceLogsHashFromIteration2 = sha256( - abi.encodePacked( - referenceLogsHashFromIteration1, privateCircuitPublicInputsLogsHashSecondCall - ) - ); - - assertEq(bytesAdvanced, encodedLogs.length, "Advanced by an incorrect number of bytes"); - assertEq(logsHash, referenceLogsHashFromIteration2, "Incorrect logs hash"); - } - - function 
testComputeKernelLogsMiddleIterationWithoutLogs() public { - // || K_LOGS_LEN | I1_LOGS_LEN | I1_LOGS | I2_LOGS_LEN | I2_LOGS | I3_LOGS_LEN | I3_LOGS || - // K_LOGS_LEN = 4 + 8 + 4 + 0 + 4 + 20 = 40 (hex"00000028") - // I1_LOGS_LEN = 8 (hex"00000008") - // I1_LOGS = 8 random bytes (hex"0000000493e78a70") - // I2_LOGS_LEN = 0 (hex"00000000") - // I2_LOGS = 0 bytes (hex"") - // I3_LOGS_LEN = 20 (hex"00000014") - // I3_LOGS = 20 random bytes (hex"0000001006a86173c86c6d3f108eefc36e7fb014") - bytes memory firstFunctionCallLogs = hex"0000000493e78a70"; - bytes memory secondFunctionCallLogs = hex""; - bytes memory thirdFunctionCallLogs = hex"0000001006a86173c86c6d3f108eefc36e7fb014"; - bytes memory encodedLogs = abi.encodePacked( - hex"0000002800000008", - firstFunctionCallLogs, - hex"00000000", - secondFunctionCallLogs, - hex"00000014", - thirdFunctionCallLogs - ); - (bytes32 logsHash, uint256 bytesAdvanced) = helper.computeKernelLogsHash(encodedLogs); - - bytes32 referenceLogsHashFromIteration1 = - sha256(abi.encodePacked(bytes32(0), sha256(firstFunctionCallLogs))); - - bytes32 privateCircuitPublicInputsLogsHashSecondCall = sha256(secondFunctionCallLogs); - - bytes32 referenceLogsHashFromIteration2 = sha256( - abi.encodePacked( - referenceLogsHashFromIteration1, privateCircuitPublicInputsLogsHashSecondCall - ) - ); - - bytes32 privateCircuitPublicInputsLogsHashThirdCall = sha256(thirdFunctionCallLogs); - - bytes32 referenceLogsHashFromIteration3 = sha256( - abi.encodePacked(referenceLogsHashFromIteration2, privateCircuitPublicInputsLogsHashThirdCall) - ); - - assertEq(bytesAdvanced, encodedLogs.length, "Advanced by an incorrect number of bytes"); - assertEq(logsHash, referenceLogsHashFromIteration3, "Incorrect logs hash"); - } -} +// // SPDX-License-Identifier: Apache-2.0 +// // Copyright 2023 Aztec Labs. 
+// pragma solidity >=0.8.18; + +// import {DecoderBase} from "./Base.sol"; + +// import {Hash} from "../../src/core/libraries/Hash.sol"; +// import {DataStructures} from "../../src/core/libraries/DataStructures.sol"; +// import {DecoderHelper} from "../DecoderHelper.sol"; + +// import {Decoder} from "../../src/core/libraries/decoders/Decoder.sol"; +// import {HeaderDecoder} from "../../src/core/libraries/decoders/HeaderDecoder.sol"; +// import {MessagesDecoder} from "../../src/core/libraries/decoders/MessagesDecoder.sol"; +// import {TxsDecoder} from "../../src/core/libraries/decoders/TxsDecoder.sol"; + +// import {AvailabilityOracle} from "../../src/core/availability_oracle/AvailabilityOracle.sol"; + +// contract HeaderDecoderHelper { +// // A wrapper used such that we get "calldata" and not memory +// function decode(bytes calldata _header) +// public +// pure +// returns ( +// uint256 chainId, +// uint256 version, +// uint256 blockNumber, +// uint256 timestamp, +// bytes32 lastArchive +// ) +// { +// return HeaderDecoder.decode(_header); +// } +// } + +// contract MessagesDecoderHelper { +// // A wrapper used such that we get "calldata" and not memory +// function decode(bytes calldata _header) +// public +// pure +// returns ( +// bytes32 l1ToL2MsgsHash, +// bytes32 l2ToL1MsgsHash, +// bytes32[] memory l1ToL2Msgs, +// bytes32[] memory l2ToL1Msgs +// ) +// { +// return MessagesDecoder.decode(_header[HeaderDecoder.BLOCK_HEADER_SIZE:]); +// } +// } + +// contract TxsDecoderHelper { +// // A wrapper used such that we get "calldata" and not memory +// function decode(bytes calldata _header) public pure returns (bytes32 txsHash) { +// return TxsDecoder.decode(_header[HeaderDecoder.BLOCK_HEADER_SIZE:]); +// } +// } + +// /** +// * Blocks are generated using the `integration_l1_publisher.test.ts` tests. +// * Main use of these test is shorter cycles when updating the decoder contract. +// * All tests here are skipped (all tests are prefixed with an underscore)! 
+// * This is because we implicitly test the decoding in integration_l1_publisher.test.ts +// */ +// contract DecoderTest is DecoderBase { +// DecoderHelper internal helper; +// HeaderDecoderHelper internal headerHelper; +// MessagesDecoderHelper internal messagesHelper; +// TxsDecoderHelper internal txsHelper; + +// function setUp() public virtual { +// helper = new DecoderHelper(); +// headerHelper = new HeaderDecoderHelper(); +// messagesHelper = new MessagesDecoderHelper(); +// txsHelper = new TxsDecoderHelper(); +// } + +// function testDecodeBlocks() public { +// _testDecodeBlock("mixed_block_0"); +// _testDecodeBlock("mixed_block_1"); +// _testDecodeBlock("empty_block_0"); +// _testDecodeBlock("empty_block_1"); +// } + +// function _testDecodeBlock(string memory name) public virtual { +// DecoderBase.Full memory data = load(name); + +// // Using the FULL decoder. +// ( +// uint256 l2BlockNumber, +// bytes32 startStateHash, +// bytes32 endStateHash, +// bytes32 publicInputsHash, +// bytes32[] memory l2ToL1Msgs, +// bytes32[] memory l1ToL2Msgs +// ) = helper.decode(data.block.body); +// (bytes32 diffRoot, bytes32 l1ToL2MessagesHash) = +// helper.computeDiffRootAndMessagesHash(data.block.body); + +// // Header +// { +// (uint256 headerL2BlockNumber, bytes32 headerStartStateHash, bytes32 headerEndStateHash) = +// headerHelper.decode(data.block.body); + +// assertEq(l2BlockNumber, data.block.blockNumber, "Invalid block number"); +// assertEq(headerL2BlockNumber, data.block.blockNumber, "Invalid block number"); +// assertEq(startStateHash, data.block.startStateHash, "Invalid start state hash"); +// assertEq(headerStartStateHash, data.block.startStateHash, "Invalid start state hash"); +// assertEq(endStateHash, data.block.endStateHash, "Invalid end state hash"); +// assertEq(headerEndStateHash, data.block.endStateHash, "Invalid end state hash"); +// } + +// // Messages +// { +// ( +// bytes32 msgsInHash, +// bytes32 msgsL2ToL1MsgsHash, +// bytes32[] memory 
msgsL1ToL2Msgs, +// bytes32[] memory msgsL2ToL1Msgs +// ) = messagesHelper.decode(data.block.body); + +// assertEq(msgsInHash, data.block.l1ToL2MessagesHash, "Invalid l1ToL2MsgsHash msgs"); +// assertEq(l1ToL2MessagesHash, data.block.l1ToL2MessagesHash, "Invalid l1ToL2MsgsHash full"); + +// // assertEq(msgsL2ToL1MsgsHash, b.l2ToL1MessagesHash, "Invalid l2ToL1MsgsHash"); + +// // L1 -> L2 messages +// assertEq( +// msgsL1ToL2Msgs.length, data.messages.l1ToL2Messages.length, "Invalid l1ToL2Msgs length" +// ); +// assertEq(l1ToL2Msgs.length, data.messages.l1ToL2Messages.length, "Invalid l1ToL2Msgs length"); +// for (uint256 i = 0; i < msgsL1ToL2Msgs.length; i++) { +// assertEq(msgsL1ToL2Msgs[i], data.messages.l1ToL2Messages[i], "Invalid l1ToL2Msgs messages"); +// assertEq(l1ToL2Msgs[i], data.messages.l1ToL2Messages[i], "Invalid l1ToL2Msgs full"); +// } + +// // L2 -> L1 messages +// assertEq( +// msgsL2ToL1Msgs.length, data.messages.l2ToL1Messages.length, "Invalid l2ToL1Msgs length" +// ); +// assertEq(l2ToL1Msgs.length, data.messages.l2ToL1Messages.length, "Invalid l2ToL1Msgs length"); +// for (uint256 i = 0; i < msgsL2ToL1Msgs.length; i++) { +// assertEq(msgsL2ToL1Msgs[i], data.messages.l2ToL1Messages[i], "Invalid l2ToL1Msgs messages"); +// assertEq(l2ToL1Msgs[i], data.messages.l2ToL1Messages[i], "Invalid l2ToL1Msgs full"); +// } +// } + +// // Txs +// { +// bytes32 txsHash = txsHelper.decode(data.block.body); +// assertEq(txsHash, data.block.calldataHash, "Invalid txs hash"); +// assertEq(diffRoot, data.block.calldataHash, "Invalid diff root/calldata hash"); +// } + +// // The public inputs are computed based of these values, but not directly part of the decoding per say. 
+// } + +// function testComputeKernelLogsIterationWithoutLogs() public { +// bytes memory kernelLogsLength = hex"00000004"; // 4 bytes containing value 4 +// bytes memory iterationLogsLength = hex"00000000"; // 4 empty bytes indicating that length of this iteration's logs is 0 +// bytes memory encodedLogs = abi.encodePacked(kernelLogsLength, iterationLogsLength); + +// (bytes32 logsHash, uint256 bytesAdvanced) = helper.computeKernelLogsHash(encodedLogs); + +// bytes32 kernelPublicInputsLogsHash = bytes32(0); +// bytes32 privateCircuitPublicInputsLogsHash = sha256(new bytes(0)); + +// bytes32 referenceLogsHash = +// sha256(abi.encodePacked(kernelPublicInputsLogsHash, privateCircuitPublicInputsLogsHash)); + +// assertEq(bytesAdvanced, encodedLogs.length, "Advanced by an incorrect number of bytes"); +// assertEq(logsHash, referenceLogsHash, "Incorrect logs hash"); +// } + +// function testComputeKernelLogs1Iteration() public { +// // || K_LOGS_LEN | I1_LOGS_LEN | I1_LOGS || +// // K_LOGS_LEN = 4 + 8 = 12 (hex"0000000c") +// // I1_LOGS_LEN = 8 (hex"00000008") +// // I1_LOGS = 8 bytes (hex"0000000493e78a70") // Note: 00000004 is the length of 1 log within function logs +// bytes memory firstFunctionCallLogs = hex"0000000493e78a70"; +// // Prefix logs with length of kernel logs (12) and length of iteration 1 logs (8) +// bytes memory encodedLogs = abi.encodePacked(hex"0000000c00000008", firstFunctionCallLogs); +// (bytes32 logsHash, uint256 bytesAdvanced) = helper.computeKernelLogsHash(encodedLogs); + +// // Zero because this is the first iteration +// bytes32 previousKernelPublicInputsLogsHash = bytes32(0); +// bytes32 privateCircuitPublicInputsLogsHashFirstCall = sha256(firstFunctionCallLogs); + +// bytes32 referenceLogsHash = sha256( +// abi.encodePacked( +// previousKernelPublicInputsLogsHash, privateCircuitPublicInputsLogsHashFirstCall +// ) +// ); + +// assertEq(bytesAdvanced, encodedLogs.length, "Advanced by an incorrect number of bytes"); +// assertEq(logsHash, 
referenceLogsHash, "Incorrect logs hash"); +// } + +// function testComputeKernelLogs2Iterations() public { +// // || K_LOGS_LEN | I1_LOGS_LEN | I1_LOGS | I2_LOGS_LEN | I2_LOGS || +// // K_LOGS_LEN = 4 + 8 + 4 + 20 = 36 (hex"00000024") +// // I1_LOGS_LEN = 8 (hex"00000008") +// // I1_LOGS = 8 random bytes (hex"0000000493e78a70") +// // I2_LOGS_LEN = 20 (hex"00000014") +// // I2_LOGS = 20 bytes (hex"0000001006a86173c86c6d3f108eefc36e7fb014") +// bytes memory firstFunctionCallLogs = hex"0000000493e78a70"; +// bytes memory secondFunctionCallLogs = hex"0000001006a86173c86c6d3f108eefc36e7fb014"; +// bytes memory encodedLogs = abi.encodePacked( +// hex"0000002400000008", firstFunctionCallLogs, hex"00000014", secondFunctionCallLogs +// ); +// (bytes32 logsHash, uint256 bytesAdvanced) = helper.computeKernelLogsHash(encodedLogs); + +// bytes32 referenceLogsHashFromIteration1 = +// sha256(abi.encodePacked(bytes32(0), sha256(firstFunctionCallLogs))); + +// bytes32 privateCircuitPublicInputsLogsHashSecondCall = sha256(secondFunctionCallLogs); + +// bytes32 referenceLogsHashFromIteration2 = sha256( +// abi.encodePacked( +// referenceLogsHashFromIteration1, privateCircuitPublicInputsLogsHashSecondCall +// ) +// ); + +// assertEq(bytesAdvanced, encodedLogs.length, "Advanced by an incorrect number of bytes"); +// assertEq(logsHash, referenceLogsHashFromIteration2, "Incorrect logs hash"); +// } + +// function testComputeKernelLogsMiddleIterationWithoutLogs() public { +// // || K_LOGS_LEN | I1_LOGS_LEN | I1_LOGS | I2_LOGS_LEN | I2_LOGS | I3_LOGS_LEN | I3_LOGS || +// // K_LOGS_LEN = 4 + 8 + 4 + 0 + 4 + 20 = 40 (hex"00000028") +// // I1_LOGS_LEN = 8 (hex"00000008") +// // I1_LOGS = 8 random bytes (hex"0000000493e78a70") +// // I2_LOGS_LEN = 0 (hex"00000000") +// // I2_LOGS = 0 bytes (hex"") +// // I3_LOGS_LEN = 20 (hex"00000014") +// // I3_LOGS = 20 random bytes (hex"0000001006a86173c86c6d3f108eefc36e7fb014") +// bytes memory firstFunctionCallLogs = hex"0000000493e78a70"; +// bytes 
memory secondFunctionCallLogs = hex""; +// bytes memory thirdFunctionCallLogs = hex"0000001006a86173c86c6d3f108eefc36e7fb014"; +// bytes memory encodedLogs = abi.encodePacked( +// hex"0000002800000008", +// firstFunctionCallLogs, +// hex"00000000", +// secondFunctionCallLogs, +// hex"00000014", +// thirdFunctionCallLogs +// ); +// (bytes32 logsHash, uint256 bytesAdvanced) = helper.computeKernelLogsHash(encodedLogs); + +// bytes32 referenceLogsHashFromIteration1 = +// sha256(abi.encodePacked(bytes32(0), sha256(firstFunctionCallLogs))); + +// bytes32 privateCircuitPublicInputsLogsHashSecondCall = sha256(secondFunctionCallLogs); + +// bytes32 referenceLogsHashFromIteration2 = sha256( +// abi.encodePacked( +// referenceLogsHashFromIteration1, privateCircuitPublicInputsLogsHashSecondCall +// ) +// ); + +// bytes32 privateCircuitPublicInputsLogsHashThirdCall = sha256(thirdFunctionCallLogs); + +// bytes32 referenceLogsHashFromIteration3 = sha256( +// abi.encodePacked(referenceLogsHashFromIteration2, privateCircuitPublicInputsLogsHashThirdCall) +// ); + +// assertEq(bytesAdvanced, encodedLogs.length, "Advanced by an incorrect number of bytes"); +// assertEq(logsHash, referenceLogsHashFromIteration3, "Incorrect logs hash"); +// } +// } From 6d4f22ab4fbe9345701872b146598f5b9af976ca Mon Sep 17 00:00:00 2001 From: benesjan Date: Mon, 15 Jan 2024 09:30:59 +0000 Subject: [PATCH 7/9] WIP --- .../archiver/src/archiver/archiver.test.ts | 8 +-- .../archiver/src/archiver/eth_log_handlers.ts | 14 +++-- yarn-project/circuit-types/src/l2_block.ts | 52 +++---------------- .../src/integration_l1_publisher.test.ts | 26 +++++++--- .../block_builder/solo_block_builder.test.ts | 6 +-- .../src/block_builder/solo_block_builder.ts | 7 +-- .../src/publisher/l1-publisher.ts | 26 ++++++---- .../src/publisher/viem-tx-sender.ts | 9 +++- 8 files changed, 66 insertions(+), 82 deletions(-) diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts 
b/yarn-project/archiver/src/archiver/archiver.test.ts index d873da2de833..cacd3560a773 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -269,7 +269,7 @@ describe('Archiver', () => { function makeL2BlockProcessedEvent(l1BlockNum: bigint, l2BlockNum: bigint) { return { blockNumber: l1BlockNum, - args: { blockNum: l2BlockNum }, + args: { blockNumber: l2BlockNum }, transactionHash: `0x${l2BlockNum}`, } as Log; } @@ -349,8 +349,10 @@ function makeL1ToL2MessageCancelledEvents(l1BlockNum: bigint, entryKeys: string[ * @returns A fake tx with calldata that corresponds to calling process in the Rollup contract. */ function makeRollupTx(l2Block: L2Block) { + const header = toHex(l2Block.header.toBuffer()); + const archive = toHex(l2Block.archive.toBuffer()); + const body = toHex(l2Block.bodyToBuffer()); const proof = `0x`; - const block = toHex(l2Block.toBufferWithLogs()); - const input = encodeFunctionData({ abi: RollupAbi, functionName: 'process', args: [proof, block] }); + const input = encodeFunctionData({ abi: RollupAbi, functionName: 'process', args: [header, archive, body, proof] }); return { input } as Transaction; } diff --git a/yarn-project/archiver/src/archiver/eth_log_handlers.ts b/yarn-project/archiver/src/archiver/eth_log_handlers.ts index ebdd62a3e98b..0f0df679f3b6 100644 --- a/yarn-project/archiver/src/archiver/eth_log_handlers.ts +++ b/yarn-project/archiver/src/archiver/eth_log_handlers.ts @@ -76,12 +76,12 @@ export async function processBlockLogs( ): Promise { const retrievedBlocks: L2Block[] = []; for (const log of logs) { - const blockNum = log.args.blockNum; + const blockNum = log.args.blockNumber; if (blockNum !== expectedL2BlockNumber) { throw new Error('Block number mismatch. 
Expected: ' + expectedL2BlockNumber + ' but got: ' + blockNum + '.'); } // TODO: Fetch blocks from calldata in parallel - const newBlock = await getBlockFromCallData(publicClient, log.transactionHash!, log.args.blockNum); + const newBlock = await getBlockFromCallData(publicClient, log.transactionHash!, log.args.blockNumber); newBlock.setL1BlockNumber(log.blockNumber!); retrievedBlocks.push(newBlock); expectedL2BlockNumber++; @@ -112,8 +112,14 @@ async function getBlockFromCallData( if (functionName !== 'process') { throw new Error(`Unexpected method called ${functionName}`); } - const [, l2BlockHex] = args! as [Hex, Hex]; - const block = L2Block.fromBufferWithLogs(Buffer.from(hexToBytes(l2BlockHex))); + // TODO(benesjan): This is brittle and should be handled inside the L2 Block. + const [headerHex, archiveHex, bodyHex] = args! as [Hex, Hex, Hex, Hex]; + const blockBuffer = Buffer.concat([ + Buffer.from(hexToBytes(headerHex)), + Buffer.from(hexToBytes(archiveHex)), + Buffer.from(hexToBytes(bodyHex)), + ]); + const block = L2Block.fromBufferWithLogs(blockBuffer); if (BigInt(block.number) !== l2BlockNum) { throw new Error(`Block number mismatch: expected ${l2BlockNum} but got ${block.number}`); } diff --git a/yarn-project/circuit-types/src/l2_block.ts b/yarn-project/circuit-types/src/l2_block.ts index 8d539e90785c..e0ada8a4fca3 100644 --- a/yarn-project/circuit-types/src/l2_block.ts +++ b/yarn-project/circuit-types/src/l2_block.ts @@ -263,19 +263,7 @@ export class L2Block { */ toBuffer() { return serializeToBuffer( - this.header.globalVariables, - // TODO(#3868) - AppendOnlyTreeSnapshot.empty(), // this.startNoteHashTreeSnapshot, - AppendOnlyTreeSnapshot.empty(), // this.startNullifierTreeSnapshot, - AppendOnlyTreeSnapshot.empty(), // this.startContractTreeSnapshot, - AppendOnlyTreeSnapshot.empty(), // this.startPublicDataTreeSnapshot, - AppendOnlyTreeSnapshot.empty(), // this.startL1ToL2MessageTreeSnapshot, - this.header.lastArchive, - 
this.header.state.partial.noteHashTree, - this.header.state.partial.nullifierTree, - this.header.state.partial.contractTree, - this.header.state.partial.publicDataTree, - this.header.state.l1ToL2MessageTree, + this.header, this.archive, this.newCommitments.length, this.newCommitments, @@ -308,13 +296,6 @@ export class L2Block { return serializeToBuffer(this.toBuffer(), this.newEncryptedLogs, this.newUnencryptedLogs); } - headerAndArchiveToBuffer() { - return serializeToBuffer( - this.header, - this.archive, - ); - } - bodyToBuffer(): Buffer { if (this.newEncryptedLogs === undefined || this.newUnencryptedLogs === undefined) { throw new Error( @@ -335,7 +316,7 @@ export class L2Block { this.newContracts, this.newContractData, this.newL1ToL2Messages.length, - this.newL1ToL2Messages, + this.newL1ToL2Messages, this.newEncryptedLogs, this.newUnencryptedLogs ); } @@ -357,20 +338,8 @@ export class L2Block { */ static fromBuffer(buf: Buffer | BufferReader, blockHash?: Buffer) { const reader = BufferReader.asReader(buf); - const globalVariables = reader.readObject(GlobalVariables); - // TODO(#3938): update the encoding here - reader.readObject(AppendOnlyTreeSnapshot); // startNoteHashTreeSnapshot - reader.readObject(AppendOnlyTreeSnapshot); // startNullifierTreeSnapshot - reader.readObject(AppendOnlyTreeSnapshot); // startContractTreeSnapshot - reader.readObject(AppendOnlyTreeSnapshot); // startPublicDataTreeSnapshot - reader.readObject(AppendOnlyTreeSnapshot); // startL1ToL2MessageTreeSnapshot - const startArchiveSnapshot = reader.readObject(AppendOnlyTreeSnapshot); - const endNoteHashTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); - const endNullifierTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); - const endContractTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); - const endPublicDataTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); - const endL1ToL2MessageTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); - const 
endArchiveSnapshot = reader.readObject(AppendOnlyTreeSnapshot); + const header = reader.readObject(Header); + const archive = reader.readObject(AppendOnlyTreeSnapshot); const newCommitments = reader.readVector(Fr); const newNullifiers = reader.readVector(Fr); const newPublicDataWrites = reader.readVector(PublicDataWrite); @@ -380,19 +349,9 @@ export class L2Block { // TODO(sean): could an optimization of this be that it is encoded such that zeros are assumed const newL1ToL2Messages = reader.readVector(Fr); - const partial = new PartialStateReference( - endNoteHashTreeSnapshot, - endNullifierTreeSnapshot, - endContractTreeSnapshot, - endPublicDataTreeSnapshot, - ); - const state = new StateReference(endL1ToL2MessageTreeSnapshot, partial); - // TODO(#3938): populate bodyHash - const header = new Header(startArchiveSnapshot, [Fr.ZERO, Fr.ZERO], state, globalVariables); - return L2Block.fromFields( { - archive: endArchiveSnapshot, + archive, header, newCommitments, newNullifiers, @@ -565,6 +524,7 @@ export class L2Block { * and inside the circuit, it is part of the public inputs. * @returns The calldata hash. 
*/ + // TODO(benesjan): Update to getBodyHash getCalldataHash() { if (this.newEncryptedLogs === undefined) { throw new Error('Encrypted logs has to be attached before calling "getCalldataHash"'); diff --git a/yarn-project/end-to-end/src/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/integration_l1_publisher.test.ts index a179716361ce..879279a20978 100644 --- a/yarn-project/end-to-end/src/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/integration_l1_publisher.test.ts @@ -280,7 +280,7 @@ describe('L1Publisher integration', () => { }; it(`Build ${numberOfConsecutiveBlocks} blocks of 4 bloated txs building on each other`, async () => { - const stateInRollup_ = await rollup.read.rollupStateHash(); + const stateInRollup_ = await rollup.read.archive(); expect(hexStringToBuffer(stateInRollup_.toString())).toEqual(Buffer.alloc(32, 0)); const blockNumber = await publicClient.getBlockNumber(); @@ -362,7 +362,7 @@ describe('L1Publisher integration', () => { fromBlock: blockNumber + 1n, }); expect(logs).toHaveLength(i + 1); - expect(logs[i].args.blockNum).toEqual(BigInt(i + 1)); + expect(logs[i].args.blockNumber).toEqual(BigInt(i + 1)); const ethTx = await publicClient.getTransaction({ hash: logs[i].transactionHash!, @@ -371,14 +371,19 @@ describe('L1Publisher integration', () => { const expectedData = encodeFunctionData({ abi: RollupAbi, functionName: 'process', - args: [`0x${l2Proof.toString('hex')}`, `0x${block.toBufferWithLogs().toString('hex')}`], + args: [ + `0x${block.header.toBuffer().toString('hex')}`, + `0x${block.archive.toBuffer().toString('hex')}`, + `0x${block.bodyToBuffer().toString('hex')}`, + `0x${l2Proof.toString('hex')}`, + ], }); expect(ethTx.input).toEqual(expectedData); const decoderArgs = [`0x${block.toBufferWithLogs().toString('hex')}`] as const; const decodedHashes = await decoderHelper.read.computeDiffRootAndMessagesHash(decoderArgs); const decodedRes = await decoderHelper.read.decode(decoderArgs); - const 
stateInRollup = await rollup.read.rollupStateHash(); + const stateInRollup = await rollup.read.archive(); expect(block.number).toEqual(Number(decodedRes[0])); expect(block.getStartStateHash()).toEqual(hexStringToBuffer(decodedRes[1].toString())); @@ -403,7 +408,7 @@ describe('L1Publisher integration', () => { }, 360_000); it(`Build ${numberOfConsecutiveBlocks} blocks of 4 empty txs building on each other`, async () => { - const stateInRollup_ = await rollup.read.rollupStateHash(); + const stateInRollup_ = await rollup.read.archive(); expect(hexStringToBuffer(stateInRollup_.toString())).toEqual(Buffer.alloc(32, 0)); const blockNumber = await publicClient.getBlockNumber(); @@ -438,7 +443,7 @@ describe('L1Publisher integration', () => { fromBlock: blockNumber + 1n, }); expect(logs).toHaveLength(i + 1); - expect(logs[i].args.blockNum).toEqual(BigInt(i + 1)); + expect(logs[i].args.blockNumber).toEqual(BigInt(i + 1)); const ethTx = await publicClient.getTransaction({ hash: logs[i].transactionHash!, @@ -447,14 +452,19 @@ describe('L1Publisher integration', () => { const expectedData = encodeFunctionData({ abi: RollupAbi, functionName: 'process', - args: [`0x${l2Proof.toString('hex')}`, `0x${block.toBufferWithLogs().toString('hex')}`], + args: [ + `0x${block.header.toBuffer().toString('hex')}`, + `0x${block.archive.toBuffer().toString('hex')}`, + `0x${block.bodyToBuffer().toString('hex')}`, + `0x${l2Proof.toString('hex')}`, + ], }); expect(ethTx.input).toEqual(expectedData); const decoderArgs = [`0x${block.toBufferWithLogs().toString('hex')}`] as const; const decodedHashes = await decoderHelper.read.computeDiffRootAndMessagesHash(decoderArgs); const decodedRes = await decoderHelper.read.decode(decoderArgs); - const stateInRollup = await rollup.read.rollupStateHash(); + const stateInRollup = await rollup.read.archive(); expect(block.number).toEqual(Number(decodedRes[0])); expect(block.getStartStateHash()).toEqual(hexStringToBuffer(decodedRes[1].toString())); diff --git 
a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts index 5a390b36fa6f..a0e03bb5afc8 100644 --- a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts +++ b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts @@ -250,11 +250,7 @@ describe('sequencer/solo_block_builder', () => { newUnencryptedLogs, }); - const callDataHash = l2Block.getCalldataHash(); - const high = Fr.fromBuffer(callDataHash.slice(0, 16)); - const low = Fr.fromBuffer(callDataHash.slice(16, 32)); - - rootRollupOutput.header.bodyHash = [high, low]; + rootRollupOutput.header.bodyHash = l2Block.getCalldataHash(); return txs; }; diff --git a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts index ee8d709fee08..fd74a0908162 100644 --- a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts +++ b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts @@ -144,11 +144,12 @@ export class SoloBlockBuilder implements BlockBuilder { newUnencryptedLogs, }); + // TODO: update naming here if (!l2Block.getCalldataHash().equals(circuitsOutput.header.bodyHash)) { throw new Error( - `Calldata hash mismatch, ${l2Block.getCalldataHash().toString('hex')} == ${circuitsOutput - .sha256CalldataHash() - .toString('hex')} `, + `Calldata hash mismatch, ${l2Block + .getCalldataHash() + .toString('hex')} == ${circuitsOutput.header.bodyHash.toString('hex')} `, ); } diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index db6b3cdfa242..efa87c385424 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -82,17 +82,17 @@ export interface L1PublisherTxSender { } /** - * Encoded block data and proof 
ready to be pushed to the L1 contract. + * Encoded block and proof ready to be pushed to the L1 contract. */ export type L1ProcessArgs = { - /** - * Root rollup proof for an L1 block. - */ + /** The L2 block header. */ + header: Buffer; + /** A snapshot (root and next available leaf index) of the archive tree after the L2 block is applied. */ + archive: Buffer; + /** L2 block body. */ + body: Buffer; + /** Root rollup proof of the L2 block. */ proof: Buffer; - /** - * Serialized L2Block data. - */ - inputs: Buffer; }; /** @@ -129,9 +129,12 @@ export class L1Publisher implements L2BlockReceiver { * @returns True once the tx has been confirmed and is successful, false on revert or interrupt, blocks otherwise. */ public async publishL2Block(block: L2Block): Promise { - const proof = Buffer.alloc(0); - - const txData = { proof, inputs: block.toBufferWithLogs() }; + const txData = { + header: block.header.toBuffer(), + archive: block.archive.toBuffer(), + body: block.bodyToBuffer(), + proof: Buffer.alloc(0), + }; const startStateHash = block.getStartStateHash(); while (!this.interrupted) { @@ -243,6 +246,7 @@ export class L1Publisher implements L2BlockReceiver { * @param startStateHash - The start state hash of the block we wish to publish. * @returns Boolean indicating if the hashes are equal. 
*/ + // TODO(benesjan): rename this private async checkStartStateHash(startStateHash: Buffer): Promise { const fromChain = await this.txSender.getCurrentStateHash(); const areSame = startStateHash.equals(fromChain); diff --git a/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts b/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts index 89a3037ec325..e301b6d0b632 100644 --- a/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts +++ b/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts @@ -76,7 +76,7 @@ export class ViemTxSender implements L1PublisherTxSender { } async getCurrentStateHash(): Promise { - const stateHash = await this.rollupContract.read.rollupStateHash(); + const stateHash = await this.rollupContract.read.archive(); return Buffer.from(stateHash.replace('0x', ''), 'hex'); } @@ -122,7 +122,12 @@ export class ViemTxSender implements L1PublisherTxSender { * @returns The hash of the mined tx. */ async sendProcessTx(encodedData: ProcessTxArgs): Promise { - const args = [`0x${encodedData.proof.toString('hex')}`, `0x${encodedData.inputs.toString('hex')}`] as const; + const args = [ + `0x${encodedData.header.toString('hex')}`, + `0x${encodedData.archive.toString('hex')}`, + `0x${encodedData.body.toString('hex')}`, + `0x${encodedData.proof.toString('hex')}`, + ] as const; const gas = await this.rollupContract.estimateGas.process(args, { account: this.account, From 1db52cdab4bb4d1e48eaf17a2af97dadde3e4182 Mon Sep 17 00:00:00 2001 From: benesjan Date: Mon, 15 Jan 2024 09:33:53 +0000 Subject: [PATCH 8/9] Revert "temporarily disabling contract tests" This reverts commit 757dc29dfac82f89ec30d3d071d39073b1ca1d37. 
--- l1-contracts/test/Rollup.t.sol | 308 ++++++------- l1-contracts/test/decoders/Decoder.t.sol | 532 +++++++++++------------ 2 files changed, 417 insertions(+), 423 deletions(-) diff --git a/l1-contracts/test/Rollup.t.sol b/l1-contracts/test/Rollup.t.sol index 7caf9cdc04b5..97c05292291a 100644 --- a/l1-contracts/test/Rollup.t.sol +++ b/l1-contracts/test/Rollup.t.sol @@ -1,160 +1,160 @@ -// // SPDX-License-Identifier: Apache-2.0 -// // Copyright 2023 Aztec Labs. -// pragma solidity >=0.8.18; +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2023 Aztec Labs. +pragma solidity >=0.8.18; -// import {Test} from "forge-std/Test.sol"; +import {Test} from "forge-std/Test.sol"; -// import {DecoderTest} from "./decoders/Decoder.t.sol"; -// import {DecoderHelper} from "./DecoderHelper.sol"; +import {DecoderTest} from "./decoders/Decoder.t.sol"; +import {DecoderHelper} from "./DecoderHelper.sol"; -// import {DecoderBase} from "./decoders/Base.sol"; +import {DecoderBase} from "./decoders/Base.sol"; -// import {DataStructures} from "../src/core/libraries/DataStructures.sol"; +import {DataStructures} from "../src/core/libraries/DataStructures.sol"; -// import {Registry} from "../src/core/messagebridge/Registry.sol"; -// import {Inbox} from "../src/core/messagebridge/Inbox.sol"; -// import {Outbox} from "../src/core/messagebridge/Outbox.sol"; -// import {Errors} from "../src/core/libraries/Errors.sol"; -// import {Rollup} from "../src/core/Rollup.sol"; +import {Registry} from "../src/core/messagebridge/Registry.sol"; +import {Inbox} from "../src/core/messagebridge/Inbox.sol"; +import {Outbox} from "../src/core/messagebridge/Outbox.sol"; +import {Errors} from "../src/core/libraries/Errors.sol"; +import {Rollup} from "../src/core/Rollup.sol"; -// /** -// * Blocks are generated using the `integration_l1_publisher.test.ts` tests. -// * Main use of these test is shorter cycles when updating the decoder contract. 
-// */ -// contract RollupTest is DecoderBase { -// DecoderHelper internal helper; -// Registry internal registry; -// Inbox internal inbox; -// Outbox internal outbox; -// Rollup internal rollup; - -// function setUp() public virtual { -// helper = new DecoderHelper(); - -// registry = new Registry(); -// inbox = new Inbox(address(registry)); -// outbox = new Outbox(address(registry)); -// rollup = new Rollup(registry); - -// registry.upgrade(address(rollup), address(inbox), address(outbox)); -// } - -// function testMixedBlock() public { -// _testBlock("mixed_block_0"); -// } - -// function testConsecutiveMixedBlocks() public { -// _testBlock("mixed_block_0"); -// _testBlock("mixed_block_1"); -// } - -// function testEmptyBlock() public { -// _testBlock("empty_block_0"); -// } - -// function testConsecutiveEmptyBlocks() public { -// _testBlock("empty_block_0"); -// _testBlock("empty_block_1"); -// } - -// function testRevertInvalidChainId() public { -// bytes memory block_ = load("empty_block_0").block.body; - -// assembly { -// mstore(add(block_, 0x20), 0x420) -// } - -// vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidChainId.selector, 0x420, 31337)); -// rollup.process(bytes(""), block_); -// } - -// function testRevertInvalidVersion() public { -// bytes memory block_ = load("empty_block_0").block.body; - -// assembly { -// mstore(add(block_, 0x40), 0x420) -// } - -// vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidVersion.selector, 0x420, 1)); -// rollup.process(bytes(""), block_); -// } - -// function testRevertTimestampInFuture() public { -// bytes memory block_ = load("empty_block_0").block.body; - -// uint256 ts = block.timestamp + 1; -// assembly { -// mstore(add(block_, 0x80), ts) -// } - -// vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__TimestampInFuture.selector)); -// rollup.process(bytes(""), block_); -// } - -// function testRevertTimestampTooOld() public { -// bytes memory block_ = 
load("empty_block_0").block.body; - -// // Overwrite in the rollup contract -// vm.store(address(rollup), bytes32(uint256(1)), bytes32(uint256(block.timestamp))); - -// vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__TimestampTooOld.selector)); -// rollup.process(bytes(""), block_); -// } - -// function _testBlock(string memory name) public { -// DecoderBase.Full memory full = load(name); -// // We jump to the time of the block. -// vm.warp(full.block.timestamp); - -// _populateInbox(full.populate.sender, full.populate.recipient, full.populate.l1ToL2Content); - -// for (uint256 i = 0; i < full.messages.l1ToL2Messages.length; i++) { -// if (full.messages.l1ToL2Messages[i] == bytes32(0)) { -// continue; -// } -// assertTrue(inbox.contains(full.messages.l1ToL2Messages[i]), "msg not in inbox"); -// } - -// vm.record(); -// rollup.process(bytes(""), full.block.body); - -// (, bytes32[] memory inboxWrites) = vm.accesses(address(inbox)); -// (, bytes32[] memory outboxWrites) = vm.accesses(address(outbox)); - -// { -// uint256 count = 0; -// for (uint256 i = 0; i < full.messages.l2ToL1Messages.length; i++) { -// if (full.messages.l2ToL1Messages[i] == bytes32(0)) { -// continue; -// } -// assertTrue(outbox.contains(full.messages.l2ToL1Messages[i]), "msg not in outbox"); -// count++; -// } -// assertEq(outboxWrites.length, count, "Invalid outbox writes"); -// } - -// { -// uint256 count = 0; -// for (uint256 i = 0; i < full.messages.l1ToL2Messages.length; i++) { -// if (full.messages.l1ToL2Messages[i] == bytes32(0)) { -// continue; -// } -// assertFalse(inbox.contains(full.messages.l1ToL2Messages[i]), "msg not consumed"); -// count++; -// } -// assertEq(inboxWrites.length, count, "Invalid inbox writes"); -// } - -// assertEq(rollup.rollupStateHash(), full.block.endStateHash, "Invalid rollup state hash"); -// } - -// function _populateInbox(address _sender, bytes32 _recipient, bytes32[] memory _contents) internal { -// uint32 deadline = type(uint32).max; -// for 
(uint256 i = 0; i < _contents.length; i++) { -// vm.prank(_sender); -// inbox.sendL2Message( -// DataStructures.L2Actor({actor: _recipient, version: 1}), deadline, _contents[i], bytes32(0) -// ); -// } -// } -// } +/** + * Blocks are generated using the `integration_l1_publisher.test.ts` tests. + * Main use of these test is shorter cycles when updating the decoder contract. + */ +contract RollupTest is DecoderBase { + DecoderHelper internal helper; + Registry internal registry; + Inbox internal inbox; + Outbox internal outbox; + Rollup internal rollup; + + function setUp() public virtual { + helper = new DecoderHelper(); + + registry = new Registry(); + inbox = new Inbox(address(registry)); + outbox = new Outbox(address(registry)); + rollup = new Rollup(registry); + + registry.upgrade(address(rollup), address(inbox), address(outbox)); + } + + function testMixedBlock() public { + _testBlock("mixed_block_0"); + } + + function testConsecutiveMixedBlocks() public { + _testBlock("mixed_block_0"); + _testBlock("mixed_block_1"); + } + + function testEmptyBlock() public { + _testBlock("empty_block_0"); + } + + function testConsecutiveEmptyBlocks() public { + _testBlock("empty_block_0"); + _testBlock("empty_block_1"); + } + + function testRevertInvalidChainId() public { + bytes memory block_ = load("empty_block_0").block.body; + + assembly { + mstore(add(block_, 0x20), 0x420) + } + + vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidChainId.selector, 0x420, 31337)); + rollup.process(bytes(""), block_); + } + + function testRevertInvalidVersion() public { + bytes memory block_ = load("empty_block_0").block.body; + + assembly { + mstore(add(block_, 0x40), 0x420) + } + + vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidVersion.selector, 0x420, 1)); + rollup.process(bytes(""), block_); + } + + function testRevertTimestampInFuture() public { + bytes memory block_ = load("empty_block_0").block.body; + + uint256 ts = block.timestamp + 1; + assembly { + 
mstore(add(block_, 0x80), ts) + } + + vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__TimestampInFuture.selector)); + rollup.process(bytes(""), block_); + } + + function testRevertTimestampTooOld() public { + bytes memory block_ = load("empty_block_0").block.body; + + // Overwrite in the rollup contract + vm.store(address(rollup), bytes32(uint256(1)), bytes32(uint256(block.timestamp))); + + vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__TimestampTooOld.selector)); + rollup.process(bytes(""), block_); + } + + function _testBlock(string memory name) public { + DecoderBase.Full memory full = load(name); + // We jump to the time of the block. + vm.warp(full.block.timestamp); + + _populateInbox(full.populate.sender, full.populate.recipient, full.populate.l1ToL2Content); + + for (uint256 i = 0; i < full.messages.l1ToL2Messages.length; i++) { + if (full.messages.l1ToL2Messages[i] == bytes32(0)) { + continue; + } + assertTrue(inbox.contains(full.messages.l1ToL2Messages[i]), "msg not in inbox"); + } + + vm.record(); + rollup.process(bytes(""), full.block.body); + + (, bytes32[] memory inboxWrites) = vm.accesses(address(inbox)); + (, bytes32[] memory outboxWrites) = vm.accesses(address(outbox)); + + { + uint256 count = 0; + for (uint256 i = 0; i < full.messages.l2ToL1Messages.length; i++) { + if (full.messages.l2ToL1Messages[i] == bytes32(0)) { + continue; + } + assertTrue(outbox.contains(full.messages.l2ToL1Messages[i]), "msg not in outbox"); + count++; + } + assertEq(outboxWrites.length, count, "Invalid outbox writes"); + } + + { + uint256 count = 0; + for (uint256 i = 0; i < full.messages.l1ToL2Messages.length; i++) { + if (full.messages.l1ToL2Messages[i] == bytes32(0)) { + continue; + } + assertFalse(inbox.contains(full.messages.l1ToL2Messages[i]), "msg not consumed"); + count++; + } + assertEq(inboxWrites.length, count, "Invalid inbox writes"); + } + + assertEq(rollup.rollupStateHash(), full.block.endStateHash, "Invalid rollup state hash"); + } + + function 
_populateInbox(address _sender, bytes32 _recipient, bytes32[] memory _contents) internal { + uint32 deadline = type(uint32).max; + for (uint256 i = 0; i < _contents.length; i++) { + vm.prank(_sender); + inbox.sendL2Message( + DataStructures.L2Actor({actor: _recipient, version: 1}), deadline, _contents[i], bytes32(0) + ); + } + } +} diff --git a/l1-contracts/test/decoders/Decoder.t.sol b/l1-contracts/test/decoders/Decoder.t.sol index ae01d8616aad..c846f12a29a1 100644 --- a/l1-contracts/test/decoders/Decoder.t.sol +++ b/l1-contracts/test/decoders/Decoder.t.sol @@ -1,269 +1,263 @@ -// // SPDX-License-Identifier: Apache-2.0 -// // Copyright 2023 Aztec Labs. -// pragma solidity >=0.8.18; - -// import {DecoderBase} from "./Base.sol"; - -// import {Hash} from "../../src/core/libraries/Hash.sol"; -// import {DataStructures} from "../../src/core/libraries/DataStructures.sol"; -// import {DecoderHelper} from "../DecoderHelper.sol"; - -// import {Decoder} from "../../src/core/libraries/decoders/Decoder.sol"; -// import {HeaderDecoder} from "../../src/core/libraries/decoders/HeaderDecoder.sol"; -// import {MessagesDecoder} from "../../src/core/libraries/decoders/MessagesDecoder.sol"; -// import {TxsDecoder} from "../../src/core/libraries/decoders/TxsDecoder.sol"; - -// import {AvailabilityOracle} from "../../src/core/availability_oracle/AvailabilityOracle.sol"; - -// contract HeaderDecoderHelper { -// // A wrapper used such that we get "calldata" and not memory -// function decode(bytes calldata _header) -// public -// pure -// returns ( -// uint256 chainId, -// uint256 version, -// uint256 blockNumber, -// uint256 timestamp, -// bytes32 lastArchive -// ) -// { -// return HeaderDecoder.decode(_header); -// } -// } - -// contract MessagesDecoderHelper { -// // A wrapper used such that we get "calldata" and not memory -// function decode(bytes calldata _header) -// public -// pure -// returns ( -// bytes32 l1ToL2MsgsHash, -// bytes32 l2ToL1MsgsHash, -// bytes32[] memory 
l1ToL2Msgs, -// bytes32[] memory l2ToL1Msgs -// ) -// { -// return MessagesDecoder.decode(_header[HeaderDecoder.BLOCK_HEADER_SIZE:]); -// } -// } - -// contract TxsDecoderHelper { -// // A wrapper used such that we get "calldata" and not memory -// function decode(bytes calldata _header) public pure returns (bytes32 txsHash) { -// return TxsDecoder.decode(_header[HeaderDecoder.BLOCK_HEADER_SIZE:]); -// } -// } - -// /** -// * Blocks are generated using the `integration_l1_publisher.test.ts` tests. -// * Main use of these test is shorter cycles when updating the decoder contract. -// * All tests here are skipped (all tests are prefixed with an underscore)! -// * This is because we implicitly test the decoding in integration_l1_publisher.test.ts -// */ -// contract DecoderTest is DecoderBase { -// DecoderHelper internal helper; -// HeaderDecoderHelper internal headerHelper; -// MessagesDecoderHelper internal messagesHelper; -// TxsDecoderHelper internal txsHelper; - -// function setUp() public virtual { -// helper = new DecoderHelper(); -// headerHelper = new HeaderDecoderHelper(); -// messagesHelper = new MessagesDecoderHelper(); -// txsHelper = new TxsDecoderHelper(); -// } - -// function testDecodeBlocks() public { -// _testDecodeBlock("mixed_block_0"); -// _testDecodeBlock("mixed_block_1"); -// _testDecodeBlock("empty_block_0"); -// _testDecodeBlock("empty_block_1"); -// } - -// function _testDecodeBlock(string memory name) public virtual { -// DecoderBase.Full memory data = load(name); - -// // Using the FULL decoder. 
-// ( -// uint256 l2BlockNumber, -// bytes32 startStateHash, -// bytes32 endStateHash, -// bytes32 publicInputsHash, -// bytes32[] memory l2ToL1Msgs, -// bytes32[] memory l1ToL2Msgs -// ) = helper.decode(data.block.body); -// (bytes32 diffRoot, bytes32 l1ToL2MessagesHash) = -// helper.computeDiffRootAndMessagesHash(data.block.body); - -// // Header -// { -// (uint256 headerL2BlockNumber, bytes32 headerStartStateHash, bytes32 headerEndStateHash) = -// headerHelper.decode(data.block.body); - -// assertEq(l2BlockNumber, data.block.blockNumber, "Invalid block number"); -// assertEq(headerL2BlockNumber, data.block.blockNumber, "Invalid block number"); -// assertEq(startStateHash, data.block.startStateHash, "Invalid start state hash"); -// assertEq(headerStartStateHash, data.block.startStateHash, "Invalid start state hash"); -// assertEq(endStateHash, data.block.endStateHash, "Invalid end state hash"); -// assertEq(headerEndStateHash, data.block.endStateHash, "Invalid end state hash"); -// } - -// // Messages -// { -// ( -// bytes32 msgsInHash, -// bytes32 msgsL2ToL1MsgsHash, -// bytes32[] memory msgsL1ToL2Msgs, -// bytes32[] memory msgsL2ToL1Msgs -// ) = messagesHelper.decode(data.block.body); - -// assertEq(msgsInHash, data.block.l1ToL2MessagesHash, "Invalid l1ToL2MsgsHash msgs"); -// assertEq(l1ToL2MessagesHash, data.block.l1ToL2MessagesHash, "Invalid l1ToL2MsgsHash full"); - -// // assertEq(msgsL2ToL1MsgsHash, b.l2ToL1MessagesHash, "Invalid l2ToL1MsgsHash"); - -// // L1 -> L2 messages -// assertEq( -// msgsL1ToL2Msgs.length, data.messages.l1ToL2Messages.length, "Invalid l1ToL2Msgs length" -// ); -// assertEq(l1ToL2Msgs.length, data.messages.l1ToL2Messages.length, "Invalid l1ToL2Msgs length"); -// for (uint256 i = 0; i < msgsL1ToL2Msgs.length; i++) { -// assertEq(msgsL1ToL2Msgs[i], data.messages.l1ToL2Messages[i], "Invalid l1ToL2Msgs messages"); -// assertEq(l1ToL2Msgs[i], data.messages.l1ToL2Messages[i], "Invalid l1ToL2Msgs full"); -// } - -// // L2 -> L1 messages 
-// assertEq( -// msgsL2ToL1Msgs.length, data.messages.l2ToL1Messages.length, "Invalid l2ToL1Msgs length" -// ); -// assertEq(l2ToL1Msgs.length, data.messages.l2ToL1Messages.length, "Invalid l2ToL1Msgs length"); -// for (uint256 i = 0; i < msgsL2ToL1Msgs.length; i++) { -// assertEq(msgsL2ToL1Msgs[i], data.messages.l2ToL1Messages[i], "Invalid l2ToL1Msgs messages"); -// assertEq(l2ToL1Msgs[i], data.messages.l2ToL1Messages[i], "Invalid l2ToL1Msgs full"); -// } -// } - -// // Txs -// { -// bytes32 txsHash = txsHelper.decode(data.block.body); -// assertEq(txsHash, data.block.calldataHash, "Invalid txs hash"); -// assertEq(diffRoot, data.block.calldataHash, "Invalid diff root/calldata hash"); -// } - -// // The public inputs are computed based of these values, but not directly part of the decoding per say. -// } - -// function testComputeKernelLogsIterationWithoutLogs() public { -// bytes memory kernelLogsLength = hex"00000004"; // 4 bytes containing value 4 -// bytes memory iterationLogsLength = hex"00000000"; // 4 empty bytes indicating that length of this iteration's logs is 0 -// bytes memory encodedLogs = abi.encodePacked(kernelLogsLength, iterationLogsLength); - -// (bytes32 logsHash, uint256 bytesAdvanced) = helper.computeKernelLogsHash(encodedLogs); - -// bytes32 kernelPublicInputsLogsHash = bytes32(0); -// bytes32 privateCircuitPublicInputsLogsHash = sha256(new bytes(0)); - -// bytes32 referenceLogsHash = -// sha256(abi.encodePacked(kernelPublicInputsLogsHash, privateCircuitPublicInputsLogsHash)); - -// assertEq(bytesAdvanced, encodedLogs.length, "Advanced by an incorrect number of bytes"); -// assertEq(logsHash, referenceLogsHash, "Incorrect logs hash"); -// } - -// function testComputeKernelLogs1Iteration() public { -// // || K_LOGS_LEN | I1_LOGS_LEN | I1_LOGS || -// // K_LOGS_LEN = 4 + 8 = 12 (hex"0000000c") -// // I1_LOGS_LEN = 8 (hex"00000008") -// // I1_LOGS = 8 bytes (hex"0000000493e78a70") // Note: 00000004 is the length of 1 log within function logs -// 
bytes memory firstFunctionCallLogs = hex"0000000493e78a70"; -// // Prefix logs with length of kernel logs (12) and length of iteration 1 logs (8) -// bytes memory encodedLogs = abi.encodePacked(hex"0000000c00000008", firstFunctionCallLogs); -// (bytes32 logsHash, uint256 bytesAdvanced) = helper.computeKernelLogsHash(encodedLogs); - -// // Zero because this is the first iteration -// bytes32 previousKernelPublicInputsLogsHash = bytes32(0); -// bytes32 privateCircuitPublicInputsLogsHashFirstCall = sha256(firstFunctionCallLogs); - -// bytes32 referenceLogsHash = sha256( -// abi.encodePacked( -// previousKernelPublicInputsLogsHash, privateCircuitPublicInputsLogsHashFirstCall -// ) -// ); - -// assertEq(bytesAdvanced, encodedLogs.length, "Advanced by an incorrect number of bytes"); -// assertEq(logsHash, referenceLogsHash, "Incorrect logs hash"); -// } - -// function testComputeKernelLogs2Iterations() public { -// // || K_LOGS_LEN | I1_LOGS_LEN | I1_LOGS | I2_LOGS_LEN | I2_LOGS || -// // K_LOGS_LEN = 4 + 8 + 4 + 20 = 36 (hex"00000024") -// // I1_LOGS_LEN = 8 (hex"00000008") -// // I1_LOGS = 8 random bytes (hex"0000000493e78a70") -// // I2_LOGS_LEN = 20 (hex"00000014") -// // I2_LOGS = 20 bytes (hex"0000001006a86173c86c6d3f108eefc36e7fb014") -// bytes memory firstFunctionCallLogs = hex"0000000493e78a70"; -// bytes memory secondFunctionCallLogs = hex"0000001006a86173c86c6d3f108eefc36e7fb014"; -// bytes memory encodedLogs = abi.encodePacked( -// hex"0000002400000008", firstFunctionCallLogs, hex"00000014", secondFunctionCallLogs -// ); -// (bytes32 logsHash, uint256 bytesAdvanced) = helper.computeKernelLogsHash(encodedLogs); - -// bytes32 referenceLogsHashFromIteration1 = -// sha256(abi.encodePacked(bytes32(0), sha256(firstFunctionCallLogs))); - -// bytes32 privateCircuitPublicInputsLogsHashSecondCall = sha256(secondFunctionCallLogs); - -// bytes32 referenceLogsHashFromIteration2 = sha256( -// abi.encodePacked( -// referenceLogsHashFromIteration1, 
privateCircuitPublicInputsLogsHashSecondCall -// ) -// ); - -// assertEq(bytesAdvanced, encodedLogs.length, "Advanced by an incorrect number of bytes"); -// assertEq(logsHash, referenceLogsHashFromIteration2, "Incorrect logs hash"); -// } - -// function testComputeKernelLogsMiddleIterationWithoutLogs() public { -// // || K_LOGS_LEN | I1_LOGS_LEN | I1_LOGS | I2_LOGS_LEN | I2_LOGS | I3_LOGS_LEN | I3_LOGS || -// // K_LOGS_LEN = 4 + 8 + 4 + 0 + 4 + 20 = 40 (hex"00000028") -// // I1_LOGS_LEN = 8 (hex"00000008") -// // I1_LOGS = 8 random bytes (hex"0000000493e78a70") -// // I2_LOGS_LEN = 0 (hex"00000000") -// // I2_LOGS = 0 bytes (hex"") -// // I3_LOGS_LEN = 20 (hex"00000014") -// // I3_LOGS = 20 random bytes (hex"0000001006a86173c86c6d3f108eefc36e7fb014") -// bytes memory firstFunctionCallLogs = hex"0000000493e78a70"; -// bytes memory secondFunctionCallLogs = hex""; -// bytes memory thirdFunctionCallLogs = hex"0000001006a86173c86c6d3f108eefc36e7fb014"; -// bytes memory encodedLogs = abi.encodePacked( -// hex"0000002800000008", -// firstFunctionCallLogs, -// hex"00000000", -// secondFunctionCallLogs, -// hex"00000014", -// thirdFunctionCallLogs -// ); -// (bytes32 logsHash, uint256 bytesAdvanced) = helper.computeKernelLogsHash(encodedLogs); - -// bytes32 referenceLogsHashFromIteration1 = -// sha256(abi.encodePacked(bytes32(0), sha256(firstFunctionCallLogs))); - -// bytes32 privateCircuitPublicInputsLogsHashSecondCall = sha256(secondFunctionCallLogs); - -// bytes32 referenceLogsHashFromIteration2 = sha256( -// abi.encodePacked( -// referenceLogsHashFromIteration1, privateCircuitPublicInputsLogsHashSecondCall -// ) -// ); - -// bytes32 privateCircuitPublicInputsLogsHashThirdCall = sha256(thirdFunctionCallLogs); - -// bytes32 referenceLogsHashFromIteration3 = sha256( -// abi.encodePacked(referenceLogsHashFromIteration2, privateCircuitPublicInputsLogsHashThirdCall) -// ); - -// assertEq(bytesAdvanced, encodedLogs.length, "Advanced by an incorrect number of bytes"); -// 
assertEq(logsHash, referenceLogsHashFromIteration3, "Incorrect logs hash"); -// } -// } +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2023 Aztec Labs. +pragma solidity >=0.8.18; + +import {DecoderBase} from "./Base.sol"; + +import {Hash} from "../../src/core/libraries/Hash.sol"; +import {DataStructures} from "../../src/core/libraries/DataStructures.sol"; +import {DecoderHelper} from "../DecoderHelper.sol"; + +import {Decoder} from "../../src/core/libraries/decoders/Decoder.sol"; +import {HeaderDecoder} from "../../src/core/libraries/decoders/HeaderDecoder.sol"; +import {MessagesDecoder} from "../../src/core/libraries/decoders/MessagesDecoder.sol"; +import {TxsDecoder} from "../../src/core/libraries/decoders/TxsDecoder.sol"; + +import {AvailabilityOracle} from "../../src/core/availability_oracle/AvailabilityOracle.sol"; + +contract HeaderDecoderHelper { + // A wrapper used such that we get "calldata" and not memory + function decode(bytes calldata _header) + public + pure + returns (uint256 l2BlockNumber, bytes32 startStateHash, bytes32 endStateHash) + { + return HeaderDecoder.decode(_header); + } +} + +contract MessagesDecoderHelper { + // A wrapper used such that we get "calldata" and not memory + function decode(bytes calldata _header) + public + pure + returns ( + bytes32 l1ToL2MsgsHash, + bytes32 l2ToL1MsgsHash, + bytes32[] memory l1ToL2Msgs, + bytes32[] memory l2ToL1Msgs + ) + { + return MessagesDecoder.decode(_header[HeaderDecoder.BLOCK_HEADER_SIZE:]); + } +} + +contract TxsDecoderHelper { + // A wrapper used such that we get "calldata" and not memory + function decode(bytes calldata _header) public pure returns (bytes32 txsHash) { + return TxsDecoder.decode(_header[HeaderDecoder.BLOCK_HEADER_SIZE:]); + } +} + +/** + * Blocks are generated using the `integration_l1_publisher.test.ts` tests. + * Main use of these test is shorter cycles when updating the decoder contract. + * All tests here are skipped (all tests are prefixed with an underscore)! 
+ * This is because we implicitly test the decoding in integration_l1_publisher.test.ts + */ +contract DecoderTest is DecoderBase { + DecoderHelper internal helper; + HeaderDecoderHelper internal headerHelper; + MessagesDecoderHelper internal messagesHelper; + TxsDecoderHelper internal txsHelper; + + function setUp() public virtual { + helper = new DecoderHelper(); + headerHelper = new HeaderDecoderHelper(); + messagesHelper = new MessagesDecoderHelper(); + txsHelper = new TxsDecoderHelper(); + } + + function testDecodeBlocks() public { + _testDecodeBlock("mixed_block_0"); + _testDecodeBlock("mixed_block_1"); + _testDecodeBlock("empty_block_0"); + _testDecodeBlock("empty_block_1"); + } + + function _testDecodeBlock(string memory name) public virtual { + DecoderBase.Full memory data = load(name); + + // Using the FULL decoder. + ( + uint256 l2BlockNumber, + bytes32 startStateHash, + bytes32 endStateHash, + bytes32 publicInputsHash, + bytes32[] memory l2ToL1Msgs, + bytes32[] memory l1ToL2Msgs + ) = helper.decode(data.block.body); + (bytes32 diffRoot, bytes32 l1ToL2MessagesHash) = + helper.computeDiffRootAndMessagesHash(data.block.body); + + // Header + { + (uint256 headerL2BlockNumber, bytes32 headerStartStateHash, bytes32 headerEndStateHash) = + headerHelper.decode(data.block.body); + + assertEq(l2BlockNumber, data.block.blockNumber, "Invalid block number"); + assertEq(headerL2BlockNumber, data.block.blockNumber, "Invalid block number"); + assertEq(startStateHash, data.block.startStateHash, "Invalid start state hash"); + assertEq(headerStartStateHash, data.block.startStateHash, "Invalid start state hash"); + assertEq(endStateHash, data.block.endStateHash, "Invalid end state hash"); + assertEq(headerEndStateHash, data.block.endStateHash, "Invalid end state hash"); + } + + // Messages + { + ( + bytes32 msgsInHash, + bytes32 msgsL2ToL1MsgsHash, + bytes32[] memory msgsL1ToL2Msgs, + bytes32[] memory msgsL2ToL1Msgs + ) = messagesHelper.decode(data.block.body); + + 
assertEq(msgsInHash, data.block.l1ToL2MessagesHash, "Invalid l1ToL2MsgsHash msgs");
+      assertEq(l1ToL2MessagesHash, data.block.l1ToL2MessagesHash, "Invalid l1ToL2MsgsHash full");
+
+      // assertEq(msgsL2ToL1MsgsHash, b.l2ToL1MessagesHash, "Invalid l2ToL1MsgsHash");
+
+      // L1 -> L2 messages
+      assertEq(
+        msgsL1ToL2Msgs.length, data.messages.l1ToL2Messages.length, "Invalid l1ToL2Msgs length"
+      );
+      assertEq(l1ToL2Msgs.length, data.messages.l1ToL2Messages.length, "Invalid l1ToL2Msgs length");
+      for (uint256 i = 0; i < msgsL1ToL2Msgs.length; i++) {
+        assertEq(msgsL1ToL2Msgs[i], data.messages.l1ToL2Messages[i], "Invalid l1ToL2Msgs messages");
+        assertEq(l1ToL2Msgs[i], data.messages.l1ToL2Messages[i], "Invalid l1ToL2Msgs full");
+      }
+
+      // L2 -> L1 messages
+      assertEq(
+        msgsL2ToL1Msgs.length, data.messages.l2ToL1Messages.length, "Invalid l2ToL1Msgs length"
+      );
+      assertEq(l2ToL1Msgs.length, data.messages.l2ToL1Messages.length, "Invalid l2ToL1Msgs length");
+      for (uint256 i = 0; i < msgsL2ToL1Msgs.length; i++) {
+        assertEq(msgsL2ToL1Msgs[i], data.messages.l2ToL1Messages[i], "Invalid l2ToL1Msgs messages");
+        assertEq(l2ToL1Msgs[i], data.messages.l2ToL1Messages[i], "Invalid l2ToL1Msgs full");
+      }
+    }
+
+    // Txs
+    {
+      bytes32 txsHash = txsHelper.decode(data.block.body);
+      assertEq(txsHash, data.block.calldataHash, "Invalid txs hash");
+      assertEq(diffRoot, data.block.calldataHash, "Invalid diff root/calldata hash");
+    }
+
+    // The public inputs are computed based on these values, but not directly part of the decoding per se.
+ } + + function testComputeKernelLogsIterationWithoutLogs() public { + bytes memory kernelLogsLength = hex"00000004"; // 4 bytes containing value 4 + bytes memory iterationLogsLength = hex"00000000"; // 4 empty bytes indicating that length of this iteration's logs is 0 + bytes memory encodedLogs = abi.encodePacked(kernelLogsLength, iterationLogsLength); + + (bytes32 logsHash, uint256 bytesAdvanced) = helper.computeKernelLogsHash(encodedLogs); + + bytes32 kernelPublicInputsLogsHash = bytes32(0); + bytes32 privateCircuitPublicInputsLogsHash = sha256(new bytes(0)); + + bytes32 referenceLogsHash = + sha256(abi.encodePacked(kernelPublicInputsLogsHash, privateCircuitPublicInputsLogsHash)); + + assertEq(bytesAdvanced, encodedLogs.length, "Advanced by an incorrect number of bytes"); + assertEq(logsHash, referenceLogsHash, "Incorrect logs hash"); + } + + function testComputeKernelLogs1Iteration() public { + // || K_LOGS_LEN | I1_LOGS_LEN | I1_LOGS || + // K_LOGS_LEN = 4 + 8 = 12 (hex"0000000c") + // I1_LOGS_LEN = 8 (hex"00000008") + // I1_LOGS = 8 bytes (hex"0000000493e78a70") // Note: 00000004 is the length of 1 log within function logs + bytes memory firstFunctionCallLogs = hex"0000000493e78a70"; + // Prefix logs with length of kernel logs (12) and length of iteration 1 logs (8) + bytes memory encodedLogs = abi.encodePacked(hex"0000000c00000008", firstFunctionCallLogs); + (bytes32 logsHash, uint256 bytesAdvanced) = helper.computeKernelLogsHash(encodedLogs); + + // Zero because this is the first iteration + bytes32 previousKernelPublicInputsLogsHash = bytes32(0); + bytes32 privateCircuitPublicInputsLogsHashFirstCall = sha256(firstFunctionCallLogs); + + bytes32 referenceLogsHash = sha256( + abi.encodePacked( + previousKernelPublicInputsLogsHash, privateCircuitPublicInputsLogsHashFirstCall + ) + ); + + assertEq(bytesAdvanced, encodedLogs.length, "Advanced by an incorrect number of bytes"); + assertEq(logsHash, referenceLogsHash, "Incorrect logs hash"); + } + + function 
testComputeKernelLogs2Iterations() public { + // || K_LOGS_LEN | I1_LOGS_LEN | I1_LOGS | I2_LOGS_LEN | I2_LOGS || + // K_LOGS_LEN = 4 + 8 + 4 + 20 = 36 (hex"00000024") + // I1_LOGS_LEN = 8 (hex"00000008") + // I1_LOGS = 8 random bytes (hex"0000000493e78a70") + // I2_LOGS_LEN = 20 (hex"00000014") + // I2_LOGS = 20 bytes (hex"0000001006a86173c86c6d3f108eefc36e7fb014") + bytes memory firstFunctionCallLogs = hex"0000000493e78a70"; + bytes memory secondFunctionCallLogs = hex"0000001006a86173c86c6d3f108eefc36e7fb014"; + bytes memory encodedLogs = abi.encodePacked( + hex"0000002400000008", firstFunctionCallLogs, hex"00000014", secondFunctionCallLogs + ); + (bytes32 logsHash, uint256 bytesAdvanced) = helper.computeKernelLogsHash(encodedLogs); + + bytes32 referenceLogsHashFromIteration1 = + sha256(abi.encodePacked(bytes32(0), sha256(firstFunctionCallLogs))); + + bytes32 privateCircuitPublicInputsLogsHashSecondCall = sha256(secondFunctionCallLogs); + + bytes32 referenceLogsHashFromIteration2 = sha256( + abi.encodePacked( + referenceLogsHashFromIteration1, privateCircuitPublicInputsLogsHashSecondCall + ) + ); + + assertEq(bytesAdvanced, encodedLogs.length, "Advanced by an incorrect number of bytes"); + assertEq(logsHash, referenceLogsHashFromIteration2, "Incorrect logs hash"); + } + + function testComputeKernelLogsMiddleIterationWithoutLogs() public { + // || K_LOGS_LEN | I1_LOGS_LEN | I1_LOGS | I2_LOGS_LEN | I2_LOGS | I3_LOGS_LEN | I3_LOGS || + // K_LOGS_LEN = 4 + 8 + 4 + 0 + 4 + 20 = 40 (hex"00000028") + // I1_LOGS_LEN = 8 (hex"00000008") + // I1_LOGS = 8 random bytes (hex"0000000493e78a70") + // I2_LOGS_LEN = 0 (hex"00000000") + // I2_LOGS = 0 bytes (hex"") + // I3_LOGS_LEN = 20 (hex"00000014") + // I3_LOGS = 20 random bytes (hex"0000001006a86173c86c6d3f108eefc36e7fb014") + bytes memory firstFunctionCallLogs = hex"0000000493e78a70"; + bytes memory secondFunctionCallLogs = hex""; + bytes memory thirdFunctionCallLogs = hex"0000001006a86173c86c6d3f108eefc36e7fb014"; + bytes 
memory encodedLogs = abi.encodePacked( + hex"0000002800000008", + firstFunctionCallLogs, + hex"00000000", + secondFunctionCallLogs, + hex"00000014", + thirdFunctionCallLogs + ); + (bytes32 logsHash, uint256 bytesAdvanced) = helper.computeKernelLogsHash(encodedLogs); + + bytes32 referenceLogsHashFromIteration1 = + sha256(abi.encodePacked(bytes32(0), sha256(firstFunctionCallLogs))); + + bytes32 privateCircuitPublicInputsLogsHashSecondCall = sha256(secondFunctionCallLogs); + + bytes32 referenceLogsHashFromIteration2 = sha256( + abi.encodePacked( + referenceLogsHashFromIteration1, privateCircuitPublicInputsLogsHashSecondCall + ) + ); + + bytes32 privateCircuitPublicInputsLogsHashThirdCall = sha256(thirdFunctionCallLogs); + + bytes32 referenceLogsHashFromIteration3 = sha256( + abi.encodePacked(referenceLogsHashFromIteration2, privateCircuitPublicInputsLogsHashThirdCall) + ); + + assertEq(bytesAdvanced, encodedLogs.length, "Advanced by an incorrect number of bytes"); + assertEq(logsHash, referenceLogsHashFromIteration3, "Incorrect logs hash"); + } +} From 0befd671b6a597164b1c46d9fec86c3b74f68ebc Mon Sep 17 00:00:00 2001 From: benesjan Date: Mon, 15 Jan 2024 10:32:29 +0000 Subject: [PATCH 9/9] WIP --- l1-contracts/test/decoders/Base.sol | 13 ++++--- l1-contracts/test/decoders/Decoder.t.sol | 38 +++++++++---------- .../src/integration_l1_publisher.test.ts | 19 +++++++--- 3 files changed, 39 insertions(+), 31 deletions(-) diff --git a/l1-contracts/test/decoders/Base.sol b/l1-contracts/test/decoders/Base.sol index ee95efe7a46f..266abeabe062 100644 --- a/l1-contracts/test/decoders/Base.sol +++ b/l1-contracts/test/decoders/Base.sol @@ -24,14 +24,15 @@ contract DecoderBase is Test { } struct Data { + // TODO(benejsan): Use HeaderDecoder.Header here? 
+ uint256 chainId; + uint256 version; uint256 blockNumber; - bytes body; - bytes32 calldataHash; - bytes32 endStateHash; - bytes32 l1ToL2MessagesHash; - bytes32 publicInputsHash; - bytes32 startStateHash; uint256 timestamp; + bytes32 lastArchive; + bytes32 archive; + bytes header; + bytes body; } function load(string memory name) public view returns (Full memory) { diff --git a/l1-contracts/test/decoders/Decoder.t.sol b/l1-contracts/test/decoders/Decoder.t.sol index c846f12a29a1..94319fca5531 100644 --- a/l1-contracts/test/decoders/Decoder.t.sol +++ b/l1-contracts/test/decoders/Decoder.t.sol @@ -20,7 +20,7 @@ contract HeaderDecoderHelper { function decode(bytes calldata _header) public pure - returns (uint256 l2BlockNumber, bytes32 startStateHash, bytes32 endStateHash) + returns (HeaderDecoder.Header memory) { return HeaderDecoder.decode(_header); } @@ -78,29 +78,29 @@ contract DecoderTest is DecoderBase { function _testDecodeBlock(string memory name) public virtual { DecoderBase.Full memory data = load(name); - // Using the FULL decoder. - ( - uint256 l2BlockNumber, - bytes32 startStateHash, - bytes32 endStateHash, - bytes32 publicInputsHash, - bytes32[] memory l2ToL1Msgs, - bytes32[] memory l1ToL2Msgs - ) = helper.decode(data.block.body); - (bytes32 diffRoot, bytes32 l1ToL2MessagesHash) = - helper.computeDiffRootAndMessagesHash(data.block.body); + // // Using the FULL decoder. 
+ // ( + // uint256 l2BlockNumber, + // bytes32 startStateHash, + // bytes32 endStateHash, + // bytes32 publicInputsHash, + // bytes32[] memory l2ToL1Msgs, + // bytes32[] memory l1ToL2Msgs + // ) = helper.decode(data.block.body); + // (bytes32 diffRoot, bytes32 l1ToL2MessagesHash) = + // helper.computeDiffRootAndMessagesHash(data.block.body); // Header { - (uint256 headerL2BlockNumber, bytes32 headerStartStateHash, bytes32 headerEndStateHash) = + HeaderDecoder.Header memory header = headerHelper.decode(data.block.body); - assertEq(l2BlockNumber, data.block.blockNumber, "Invalid block number"); - assertEq(headerL2BlockNumber, data.block.blockNumber, "Invalid block number"); - assertEq(startStateHash, data.block.startStateHash, "Invalid start state hash"); - assertEq(headerStartStateHash, data.block.startStateHash, "Invalid start state hash"); - assertEq(endStateHash, data.block.endStateHash, "Invalid end state hash"); - assertEq(headerEndStateHash, data.block.endStateHash, "Invalid end state hash"); + assertEq(header.chainId, data.block.chainId, "Invalid chain Id"); + assertEq(header.version, data.block.version, "Invalid version"); + assertEq(header.blockNumber, data.block.blockNumber, "Invalid block number"); + assertEq(header.timestamp, data.block.timestamp, "Invalid timestamp"); + assertEq(header.lastArchive, data.block.lastArchive, "Invalid last archive"); + // assertEq(header.archive, data.block.archive, "Invalid archive"); } // Messages diff --git a/yarn-project/end-to-end/src/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/integration_l1_publisher.test.ts index 879279a20978..3af1bfd9e9f1 100644 --- a/yarn-project/end-to-end/src/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/integration_l1_publisher.test.ts @@ -264,14 +264,21 @@ describe('L1Publisher integration', () => { l2ToL1Messages: block.newL2ToL1Msgs.map(m => `0x${m.toBuffer().toString('hex').padStart(64, '0')}`), }, block: { + // The json formatting in forge is a bit 
brittle, so we convert Fr to a number in the few values below.
+        // This should not be a problem for testing as long as the values are not larger than u32.
+        chainId: Number(block.header.globalVariables.chainId.toBigInt()),
+        version: Number(block.header.globalVariables.version.toBigInt()),
         blockNumber: block.number,
-        startStateHash: `0x${block.getStartStateHash().toString('hex').padStart(64, '0')}`,
-        endStateHash: `0x${block.getEndStateHash().toString('hex').padStart(64, '0')}`,
-        publicInputsHash: `0x${block.getPublicInputsHash().toBuffer().toString('hex').padStart(64, '0')}`,
-        calldataHash: `0x${block.getCalldataHash().toString('hex').padStart(64, '0')}`,
-        l1ToL2MessagesHash: `0x${block.getL1ToL2MessagesHash().toString('hex').padStart(64, '0')}`,
+        timestamp: Number(block.header.globalVariables.timestamp.toBigInt()),
+        lastArchive: `0x${block.header.lastArchive.root.toBuffer().toString('hex').padStart(64, '0')}`,
+        header: `0x${block.bodyToBuffer().toString('hex')}`,
         body: `0x${block.toBufferWithLogs().toString('hex')}`,
-        timestamp: Number(block.header.globalVariables.timestamp.toBigInt()), // The json formatting in forge is a bit brittle, so we convert to a number here. This should not be a problem for testing as longs as the timestamp is not larger than u32.
+
+        // startStateHash: `0x${block.getStartStateHash().toString('hex').padStart(64, '0')}`,
+        // endStateHash: `0x${block.getEndStateHash().toString('hex').padStart(64, '0')}`,
+        // publicInputsHash: `0x${block.getPublicInputsHash().toBuffer().toString('hex').padStart(64, '0')}`,
+        // calldataHash: `0x${block.getCalldataHash().toString('hex').padStart(64, '0')}`,
+        // l1ToL2MessagesHash: `0x${block.getL1ToL2MessagesHash().toString('hex').padStart(64, '0')}`,
       },
     };