diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index 3b4e2b65f662..87de3e243101 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -33,6 +33,7 @@ import {Leonidas} from "./sequencer_selection/Leonidas.sol"; contract Rollup is Leonidas, IRollup, ITestRollup { struct BlockLog { bytes32 archive; + bytes32 blockHash; uint128 slotNumber; bool isProven; } @@ -88,7 +89,8 @@ contract Rollup is Leonidas, IRollup, ITestRollup { VERSION = 1; // Genesis block - blocks[0] = BlockLog({archive: bytes32(0), slotNumber: 0, isProven: true}); + blocks[0] = + BlockLog({archive: bytes32(0), blockHash: bytes32(0), slotNumber: 0, isProven: true}); pendingBlockCount = 1; provenBlockCount = 1; } @@ -181,17 +183,19 @@ contract Rollup is Leonidas, IRollup, ITestRollup { * * @param _header - The L2 block header * @param _archive - A root of the archive tree after the L2 block is applied + * @param _blockHash - The poseidon2 hash of the header added to the archive tree in the rollup circuit * @param _signatures - Signatures from the validators * @param _body - The body of the L2 block */ function publishAndProcess( bytes calldata _header, bytes32 _archive, + bytes32 _blockHash, SignatureLib.Signature[] memory _signatures, bytes calldata _body ) external override(IRollup) { AVAILABILITY_ORACLE.publish(_body); - process(_header, _archive, _signatures); + process(_header, _archive, _blockHash, _signatures); } /** @@ -200,19 +204,24 @@ contract Rollup is Leonidas, IRollup, ITestRollup { * @dev `eth_log_handlers` rely on this function * @param _header - The L2 block header * @param _archive - A root of the archive tree after the L2 block is applied + * @param _blockHash - The poseidon2 hash of the header added to the archive tree in the rollup circuit * @param _body - The body of the L2 block */ - function publishAndProcess(bytes calldata _header, bytes32 _archive, bytes calldata _body) - external - override(IRollup) - { + 
function publishAndProcess( + bytes calldata _header, + bytes32 _archive, + bytes32 _blockHash, + bytes calldata _body + ) external override(IRollup) { AVAILABILITY_ORACLE.publish(_body); - process(_header, _archive); + process(_header, _archive, _blockHash); } /** * @notice Submit a proof for a block in the pending chain * + * @dev TODO(#7346): Verify root proofs rather than block root when batch rollups are integrated. + * * @dev Will call `_progressState` to update the proven chain. Notice this have potentially * unbounded gas consumption. * @@ -231,10 +240,11 @@ contract Rollup is Leonidas, IRollup, ITestRollup { * * @param _header - The header of the block (should match the block in the pending chain) * @param _archive - The archive root of the block (should match the block in the pending chain) + * @param _proverId - The id of this block's prover * @param _aggregationObject - The aggregation object for the proof * @param _proof - The proof to verify */ - function submitProof( + function submitBlockRootProof( bytes calldata _header, bytes32 _archive, bytes32 _proverId, @@ -259,23 +269,59 @@ contract Rollup is Leonidas, IRollup, ITestRollup { revert Errors.Rollup__InvalidProposedArchive(expectedArchive, _archive); } - bytes32[] memory publicInputs = - new bytes32[](4 + Constants.HEADER_LENGTH + Constants.AGGREGATION_OBJECT_LENGTH); - // the archive tree root - publicInputs[0] = _archive; + // TODO(#7346): Currently verifying block root proofs until batch rollups fully integrated. + // Hence the below pub inputs are BlockRootOrBlockMergePublicInputs, which are larger than + // the planned set (RootRollupPublicInputs), for the interim. + // Public inputs are not fully verified (TODO(#7373)) + + bytes32[] memory publicInputs = new bytes32[]( + Constants.BLOCK_ROOT_OR_BLOCK_MERGE_PUBLIC_INPUTS_LENGTH + Constants.AGGREGATION_OBJECT_LENGTH + ); + + // From block_root_or_block_merge_public_inputs.nr: BlockRootOrBlockMergePublicInputs. 
+ // previous_archive.root: the previous archive tree root + publicInputs[0] = expectedLastArchive; + // previous_archive.next_available_leaf_index: the previous archive next available index + publicInputs[1] = bytes32(header.globalVariables.blockNumber); + + // new_archive.root: the new archive tree root + publicInputs[2] = expectedArchive; // this is the _next_ available leaf in the archive tree // normally this should be equal to the block number (since leaves are 0-indexed and blocks 1-indexed) // but in yarn-project/merkle-tree/src/new_tree.ts we prefill the tree so that block N is in leaf N - publicInputs[1] = bytes32(header.globalVariables.blockNumber + 1); - - publicInputs[2] = vkTreeRoot; - - bytes32[] memory headerFields = HeaderLib.toFields(header); - for (uint256 i = 0; i < headerFields.length; i++) { - publicInputs[i + 3] = headerFields[i]; + // new_archive.next_available_leaf_index: the new archive next available index + publicInputs[3] = bytes32(header.globalVariables.blockNumber + 1); + + // TODO(#7346): Currently previous block hash is unchecked, but will be checked in batch rollup (block merge -> root). + // block-building-helpers.ts is injecting as 0 for now, replicating here. 
+ // previous_block_hash: the block hash just preceding this block (will eventually become the end_block_hash of the prev batch) + publicInputs[4] = bytes32(0); + + // end_block_hash: the current block hash (will eventually become the hash of the final block proven in a batch) + publicInputs[5] = blocks[header.globalVariables.blockNumber].blockHash; + + // For block root proof outputs, we have a block 'range' of just 1 block => start and end globals are the same + bytes32[] memory globalVariablesFields = HeaderLib.toFields(header.globalVariables); + for (uint256 i = 0; i < globalVariablesFields.length; i++) { + // start_global_variables + publicInputs[i + 6] = globalVariablesFields[i]; + // end_global_variables + publicInputs[globalVariablesFields.length + i + 6] = globalVariablesFields[i]; } + // out_hash: root of this block's l2 to l1 message tree (will eventually be root of roots) + publicInputs[24] = header.contentCommitment.outHash; + + // For block root proof outputs, we have a single recipient-value fee payment pair, + // but the struct contains space for the max (32) => we keep 31*2=62 fields blank to represent it. 
+ // fees: array of recipient-value pairs, for a single block just one entry (will eventually be filled and paid out here) + publicInputs[25] = bytes32(uint256(uint160(header.globalVariables.coinbase))); + publicInputs[26] = bytes32(header.totalFees); + // publicInputs[27] -> publicInputs[88] left blank for empty fee array entries - publicInputs[headerFields.length + 3] = _proverId; + // vk_tree_root + publicInputs[89] = vkTreeRoot; + // prover_id: id of current block range's prover + publicInputs[90] = _proverId; // the block proof is recursive, which means it comes with an aggregation object // this snippet copies it into the public inputs needed for verification @@ -286,7 +332,7 @@ contract Rollup is Leonidas, IRollup, ITestRollup { assembly { part := calldataload(add(_aggregationObject.offset, mul(i, 32))) } - publicInputs[i + 4 + Constants.HEADER_LENGTH] = part; + publicInputs[i + 91] = part; } if (!verifier.verify(_proof, publicInputs)) { @@ -327,11 +373,13 @@ contract Rollup is Leonidas, IRollup, ITestRollup { * * @param _header - The L2 block header * @param _archive - A root of the archive tree after the L2 block is applied + * @param _blockHash - The poseidon2 hash of the header added to the archive tree in the rollup circuit * @param _signatures - Signatures from the validators */ function process( bytes calldata _header, bytes32 _archive, + bytes32 _blockHash, SignatureLib.Signature[] memory _signatures ) public override(IRollup) { // Decode and validate header @@ -343,6 +391,7 @@ contract Rollup is Leonidas, IRollup, ITestRollup { // the slot number to uint128 blocks[pendingBlockCount++] = BlockLog({ archive: _archive, + blockHash: _blockHash, slotNumber: uint128(header.globalVariables.slotNumber), isProven: false }); @@ -385,10 +434,14 @@ contract Rollup is Leonidas, IRollup, ITestRollup { * * @param _header - The L2 block header * @param _archive - A root of the archive tree after the L2 block is applied + * @param _blockHash - The poseidon2 hash of 
the header added to the archive tree in the rollup circuit */ - function process(bytes calldata _header, bytes32 _archive) public override(IRollup) { + function process(bytes calldata _header, bytes32 _archive, bytes32 _blockHash) + public + override(IRollup) + { SignatureLib.Signature[] memory emptySignatures = new SignatureLib.Signature[](0); - process(_header, _archive, emptySignatures); + process(_header, _archive, _blockHash, emptySignatures); } /** diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index 60d2c090dfd5..a2f76ee511d8 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -29,19 +29,25 @@ interface IRollup { function publishAndProcess( bytes calldata _header, bytes32 _archive, + bytes32 _blockHash, SignatureLib.Signature[] memory _signatures, bytes calldata _body ) external; - function publishAndProcess(bytes calldata _header, bytes32 _archive, bytes calldata _body) - external; - function process(bytes calldata _header, bytes32 _archive) external; + function publishAndProcess( + bytes calldata _header, + bytes32 _archive, + bytes32 _blockHash, + bytes calldata _body + ) external; + function process(bytes calldata _header, bytes32 _archive, bytes32 _blockHash) external; function process( bytes calldata _header, bytes32 _archive, + bytes32 _blockHash, SignatureLib.Signature[] memory _signatures ) external; - function submitProof( + function submitBlockRootProof( bytes calldata _header, bytes32 _archive, bytes32 _proverId, @@ -49,6 +55,18 @@ interface IRollup { bytes calldata _proof ) external; + // TODO(#7346): Integrate batch rollups + // function submitRootProof( + // bytes32 _previousArchive, + // bytes32 _archive, + // bytes32 outHash, + // address[32] calldata coinbases, + // uint256[32] calldata fees, + // bytes32 _proverId, + // bytes calldata _aggregationObject, + // bytes calldata _proof + // ) external; + function archive() 
external view returns (bytes32); function isBlockProven(uint256 _blockNumber) external view returns (bool); function archiveAt(uint256 _blockNumber) external view returns (bytes32); diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index fc5f57b5d914..e5b0a08205f4 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -91,7 +91,9 @@ library Constants { uint256 internal constant ROOT_PARITY_INDEX = 19; uint256 internal constant BASE_ROLLUP_INDEX = 20; uint256 internal constant MERGE_ROLLUP_INDEX = 21; - uint256 internal constant ROOT_ROLLUP_INDEX = 22; + uint256 internal constant BLOCK_ROOT_ROLLUP_INDEX = 22; + uint256 internal constant BLOCK_MERGE_ROLLUP_INDEX = 23; + uint256 internal constant ROOT_ROLLUP_INDEX = 24; uint256 internal constant FUNCTION_SELECTOR_NUM_BYTES = 4; uint256 internal constant ARGS_HASH_CHUNK_LENGTH = 16; uint256 internal constant ARGS_HASH_CHUNK_COUNT = 16; @@ -197,6 +199,7 @@ library Constants { uint256 internal constant KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 663; uint256 internal constant CONSTANT_ROLLUP_DATA_LENGTH = 12; uint256 internal constant BASE_OR_MERGE_PUBLIC_INPUTS_LENGTH = 29; + uint256 internal constant BLOCK_ROOT_OR_BLOCK_MERGE_PUBLIC_INPUTS_LENGTH = 91; uint256 internal constant GET_NOTES_ORACLE_RETURN_LENGTH = 674; uint256 internal constant NOTE_HASHES_NUM_BYTES_PER_BASE_ROLLUP = 2048; uint256 internal constant NULLIFIERS_NUM_BYTES_PER_BASE_ROLLUP = 2048; diff --git a/l1-contracts/src/core/libraries/HeaderLib.sol b/l1-contracts/src/core/libraries/HeaderLib.sol index 47a222bf86a5..0f9cb4271822 100644 --- a/l1-contracts/src/core/libraries/HeaderLib.sol +++ b/l1-contracts/src/core/libraries/HeaderLib.sol @@ -203,4 +203,33 @@ library HeaderLib { return fields; } + + // TODO(#7346): Currently using the below to verify block root proofs until batch rollups fully integrated. 
+ // Once integrated, remove the below fn (not used anywhere else). + function toFields(GlobalVariables memory _globalVariables) + internal + pure + returns (bytes32[] memory) + { + bytes32[] memory fields = new bytes32[](Constants.GLOBAL_VARIABLES_LENGTH); + + fields[0] = bytes32(_globalVariables.chainId); + fields[1] = bytes32(_globalVariables.version); + fields[2] = bytes32(_globalVariables.blockNumber); + fields[3] = bytes32(_globalVariables.slotNumber); + fields[4] = bytes32(_globalVariables.timestamp); + fields[5] = bytes32(uint256(uint160(_globalVariables.coinbase))); + fields[6] = bytes32(_globalVariables.feeRecipient); + fields[7] = bytes32(_globalVariables.gasFees.feePerDaGas); + fields[8] = bytes32(_globalVariables.gasFees.feePerL2Gas); + + // fail if the header structure has changed without updating this function + if (fields.length != Constants.GLOBAL_VARIABLES_LENGTH) { + // TODO(Miranda): Temporarily using this method and below error while block-root proofs are verified + // When we verify root proofs, this method can be removed => no need for separate named error + revert Errors.HeaderLib__InvalidHeaderSize(Constants.HEADER_LENGTH, fields.length); + } + + return fields; + } } diff --git a/l1-contracts/test/Rollup.t.sol b/l1-contracts/test/Rollup.t.sol index d6f517118589..7023439b8457 100644 --- a/l1-contracts/test/Rollup.t.sol +++ b/l1-contracts/test/Rollup.t.sol @@ -85,7 +85,7 @@ contract RollupTest is DecoderBase { _testBlock("mixed_block_1", false); uint256 currentSlot = rollup.getCurrentSlot(); - (, uint128 slot,) = rollup.blocks(1); + (,, uint128 slot,) = rollup.blocks(1); uint256 prunableAt = uint256(slot) + rollup.TIMELINESS_PROVING_IN_SLOTS(); vm.expectRevert( @@ -103,7 +103,7 @@ contract RollupTest is DecoderBase { // Even if we end up reverting block 1, we should still see the same root in the inbox. 
bytes32 inboxRoot2 = inbox.trees(2).root(); - (, uint128 slot,) = rollup.blocks(1); + (,, uint128 slot,) = rollup.blocks(1); uint256 prunableAt = uint256(slot) + rollup.TIMELINESS_PROVING_IN_SLOTS(); uint256 timeOfPrune = rollup.getTimestampForSlot(prunableAt); @@ -176,7 +176,7 @@ contract RollupTest is DecoderBase { feeAmount ) ); - rollup.process(header, archive); + rollup.process(header, archive, bytes32(0)); address coinbase = data.decodedHeader.globalVariables.coinbase; uint256 coinbaseBalance = portalERC20.balanceOf(coinbase); @@ -184,7 +184,7 @@ contract RollupTest is DecoderBase { portalERC20.mint(address(feeJuicePortal), feeAmount - portalBalance); - rollup.process(header, archive); + rollup.process(header, archive, bytes32(0)); assertEq(portalERC20.balanceOf(coinbase), feeAmount, "invalid coinbase balance"); } @@ -244,7 +244,7 @@ contract RollupTest is DecoderBase { availabilityOracle.publish(body); vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidBlockNumber.selector, 1, 0x420)); - rollup.process(header, archive); + rollup.process(header, archive, bytes32(0)); } function testRevertInvalidChainId() public setUpFor("empty_block_1") { @@ -261,7 +261,7 @@ contract RollupTest is DecoderBase { availabilityOracle.publish(body); vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidChainId.selector, 31337, 0x420)); - rollup.process(header, archive); + rollup.process(header, archive, bytes32(0)); } function testRevertInvalidVersion() public setUpFor("empty_block_1") { @@ -277,7 +277,7 @@ contract RollupTest is DecoderBase { availabilityOracle.publish(body); vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidVersion.selector, 1, 0x420)); - rollup.process(header, archive); + rollup.process(header, archive, bytes32(0)); } function testRevertInvalidTimestamp() public setUpFor("empty_block_1") { @@ -298,7 +298,7 @@ contract RollupTest is DecoderBase { availabilityOracle.publish(body); 
vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidTimestamp.selector, realTs, badTs)); - rollup.process(header, archive); + rollup.process(header, archive, bytes32(0)); } function testBlocksWithAssumeProven() public setUpFor("mixed_block_1") { @@ -325,7 +325,7 @@ contract RollupTest is DecoderBase { bytes32 archive = data.archive; vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__TryingToProveNonExistingBlock.selector)); - rollup.submitProof(header, archive, bytes32(0), "", ""); + rollup.submitBlockRootProof(header, archive, bytes32(0), "", ""); } function testSubmitProofInvalidArchive() public setUpFor("empty_block_1") { @@ -346,7 +346,7 @@ contract RollupTest is DecoderBase { Errors.Rollup__InvalidArchive.selector, rollup.archiveAt(1), 0xdeadbeef ) ); - rollup.submitProof(header, archive, bytes32(0), "", ""); + rollup.submitBlockRootProof(header, archive, bytes32(0), "", ""); } function testSubmitProofInvalidProposedArchive() public setUpFor("empty_block_1") { @@ -361,7 +361,7 @@ contract RollupTest is DecoderBase { vm.expectRevert( abi.encodeWithSelector(Errors.Rollup__InvalidProposedArchive.selector, archive, badArchive) ); - rollup.submitProof(header, badArchive, bytes32(0), "", ""); + rollup.submitBlockRootProof(header, badArchive, bytes32(0), "", ""); } function _testBlock(string memory name, bool _submitProof) public { @@ -394,10 +394,10 @@ contract RollupTest is DecoderBase { availabilityOracle.publish(body); - rollup.process(header, archive); + rollup.process(header, archive, bytes32(0)); if (_submitProof) { - rollup.submitProof(header, archive, bytes32(0), "", ""); + rollup.submitBlockRootProof(header, archive, bytes32(0), "", ""); assertTrue( rollup.isBlockProven(full.block.decodedHeader.globalVariables.blockNumber), diff --git a/l1-contracts/test/sparta/DevNet.t.sol b/l1-contracts/test/sparta/DevNet.t.sol index 3e0ec8e76794..ff0907ee642f 100644 --- a/l1-contracts/test/sparta/DevNet.t.sol +++ b/l1-contracts/test/sparta/DevNet.t.sol @@ 
-166,7 +166,7 @@ contract DevNetTest is DecoderBase { } vm.prank(ree.proposer); - rollup.process(header, archive); + rollup.process(header, archive, bytes32(0)); assertEq(_expectRevert, ree.shouldRevert, "Invalid revert expectation"); diff --git a/l1-contracts/test/sparta/Sparta.t.sol b/l1-contracts/test/sparta/Sparta.t.sol index 01b36aa665df..c48ba0bf3a2e 100644 --- a/l1-contracts/test/sparta/Sparta.t.sol +++ b/l1-contracts/test/sparta/Sparta.t.sol @@ -239,13 +239,13 @@ contract SpartaTest is DecoderBase { } vm.prank(ree.proposer); - rollup.process(header, archive, signatures); + rollup.process(header, archive, bytes32(0), signatures); if (ree.shouldRevert) { return; } } else { - rollup.process(header, archive); + rollup.process(header, archive, bytes32(0)); } assertEq(_expectRevert, ree.shouldRevert, "Invalid revert expectation"); diff --git a/noir-projects/noir-protocol-circuits/Nargo.template.toml b/noir-projects/noir-protocol-circuits/Nargo.template.toml index 0ff8fc258ef4..d30541738e1d 100644 --- a/noir-projects/noir-protocol-circuits/Nargo.template.toml +++ b/noir-projects/noir-protocol-circuits/Nargo.template.toml @@ -34,5 +34,7 @@ members = [ "crates/rollup-merge", "crates/rollup-base", "crates/rollup-base-simulated", + "crates/rollup-block-merge", + "crates/rollup-block-root", "crates/rollup-root", ] diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-block-merge/Nargo.toml b/noir-projects/noir-protocol-circuits/crates/rollup-block-merge/Nargo.toml new file mode 100644 index 000000000000..4f0c5ab27f79 --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/rollup-block-merge/Nargo.toml @@ -0,0 +1,9 @@ +[package] +name = "rollup_block_merge" +type = "bin" +authors = [""] +compiler_version = ">=0.18.0" + +[dependencies] +rollup_lib = { path = "../rollup-lib" } +types = { path = "../types" } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-block-merge/src/main.nr 
b/noir-projects/noir-protocol-circuits/crates/rollup-block-merge/src/main.nr new file mode 100644 index 000000000000..9347def85338 --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/rollup-block-merge/src/main.nr @@ -0,0 +1,5 @@ +use dep::rollup_lib::block_merge::{BlockMergeRollupInputs, BlockRootOrBlockMergePublicInputs}; + +fn main(inputs: BlockMergeRollupInputs) -> pub BlockRootOrBlockMergePublicInputs { + inputs.block_merge_rollup_circuit() +} diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-block-root/Nargo.toml b/noir-projects/noir-protocol-circuits/crates/rollup-block-root/Nargo.toml new file mode 100644 index 000000000000..df531661daba --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/rollup-block-root/Nargo.toml @@ -0,0 +1,9 @@ +[package] +name = "rollup_block_root" +type = "bin" +authors = [""] +compiler_version = ">=0.18.0" + +[dependencies] +rollup_lib = { path = "../rollup-lib" } +types = { path = "../types" } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-block-root/src/main.nr b/noir-projects/noir-protocol-circuits/crates/rollup-block-root/src/main.nr new file mode 100644 index 000000000000..d5e7a5e691db --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/rollup-block-root/src/main.nr @@ -0,0 +1,5 @@ +use dep::rollup_lib::block_root::{BlockRootRollupInputs, BlockRootOrBlockMergePublicInputs}; + +fn main(inputs: BlockRootRollupInputs) -> pub BlockRootOrBlockMergePublicInputs { + inputs.block_root_rollup_circuit() +} diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/block_root_or_block_merge_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/block_root_or_block_merge_public_inputs.nr new file mode 100644 index 000000000000..5ccb79061fd6 --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/block_root_or_block_merge_public_inputs.nr @@ -0,0 +1,141 @@ +use dep::types::{ + 
abis::{append_only_tree_snapshot::AppendOnlyTreeSnapshot, global_variables::GlobalVariables}, + constants::BLOCK_ROOT_OR_BLOCK_MERGE_PUBLIC_INPUTS_LENGTH, + partial_state_reference::PartialStateReference, traits::{Empty, Serialize, Deserialize}, + utils::reader::Reader, address::EthAddress +}; +use crate::abis::constant_rollup_data::ConstantRollupData; + +struct FeeRecipient { + recipient: EthAddress, + value: Field, +} + +// TODO(#7346): Should the default empty value be MAX_FIELD? The zero addr may be a valid recipient +impl Empty for FeeRecipient { + fn empty() -> Self { + FeeRecipient { + recipient: EthAddress::zero(), + value: 0, + } + } +} + +impl Serialize<2> for FeeRecipient { + fn serialize(self) -> [Field; 2] { + [self.recipient.to_field(), self.value] + } +} + +impl Deserialize<2> for FeeRecipient { + fn deserialize(values: [Field; 2]) -> Self { + Self { + recipient: EthAddress::from_field(values[0]), + value: values[1] + } + } +} + +impl Eq for FeeRecipient { + fn eq(self, other: Self) -> bool { + (self.recipient.eq(other.recipient)) & (self.value == other.value) + } +} + +// TODO: instead of archives + global vars, use ConstantRollupData x2? 
It also includes vk root +// may be confusing as new_constant.last_archive would actually be the new_archive +struct BlockRootOrBlockMergePublicInputs { + previous_archive: AppendOnlyTreeSnapshot, // Archive tree root immediately before this block range + new_archive: AppendOnlyTreeSnapshot, // Archive tree root after adding this block range + previous_block_hash: Field, // Identifier of the previous block before the range + end_block_hash: Field, // Identifier of the last block in the range + start_global_variables: GlobalVariables, // Global variables for the first block in the range + end_global_variables: GlobalVariables, // Global variables for the last block in the range + out_hash: Field, // Merkle node of the L2-to-L1 messages merkle roots in the block range + fees: [FeeRecipient; 32], // Concatenation of all coinbase and fees for the block range + vk_tree_root: Field, // Root of allowed vk tree + prover_id: Field, // TODO(#7346): Temporarily added prover_id while we verify block-root proofs on L1 +} + +impl Empty for BlockRootOrBlockMergePublicInputs { + fn empty() -> Self { + BlockRootOrBlockMergePublicInputs { + previous_archive: AppendOnlyTreeSnapshot::zero(), + new_archive: AppendOnlyTreeSnapshot::zero(), + previous_block_hash: 0, + end_block_hash: 0, + start_global_variables: GlobalVariables::empty(), + end_global_variables: GlobalVariables::empty(), + out_hash: 0, + fees: [FeeRecipient::empty(); 32], + vk_tree_root: 0, + prover_id: 0, + } + } +} + +impl Eq for BlockRootOrBlockMergePublicInputs { + fn eq(self, other: Self) -> bool { + (self.previous_archive.eq(other.previous_archive)) & + (self.new_archive.eq(other.new_archive)) & + (self.previous_block_hash == other.previous_block_hash) & + (self.end_block_hash == other.end_block_hash) & + (self.start_global_variables.eq(other.start_global_variables)) & + (self.end_global_variables.eq(other.end_global_variables)) & + (self.out_hash == other.out_hash) & + (self.fees.eq(other.fees)) & + 
(self.vk_tree_root == other.vk_tree_root) & + (self.prover_id == other.prover_id) + } +} + +impl Serialize for BlockRootOrBlockMergePublicInputs { + fn serialize(self) -> [Field; BLOCK_ROOT_OR_BLOCK_MERGE_PUBLIC_INPUTS_LENGTH] { + let mut fields: BoundedVec = BoundedVec::new(); + + fields.extend_from_array(self.previous_archive.serialize()); + fields.extend_from_array(self.new_archive.serialize()); + fields.push(self.previous_block_hash as Field); + fields.push(self.end_block_hash as Field); + fields.extend_from_array(self.start_global_variables.serialize()); + fields.extend_from_array(self.end_global_variables.serialize()); + fields.push(self.out_hash as Field); + for i in 0..32 { + fields.extend_from_array(self.fees[i].serialize()); + } + fields.push(self.vk_tree_root as Field); + fields.push(self.prover_id as Field); + assert_eq(fields.len(), BLOCK_ROOT_OR_BLOCK_MERGE_PUBLIC_INPUTS_LENGTH); + + fields.storage + } +} + +impl Deserialize for BlockRootOrBlockMergePublicInputs { + fn deserialize(fields: [Field; BLOCK_ROOT_OR_BLOCK_MERGE_PUBLIC_INPUTS_LENGTH]) -> BlockRootOrBlockMergePublicInputs { + let mut reader = Reader::new(fields); + let item = Self { + previous_archive: reader.read_struct(AppendOnlyTreeSnapshot::deserialize), + new_archive: reader.read_struct(AppendOnlyTreeSnapshot::deserialize), + previous_block_hash: reader.read(), + end_block_hash: reader.read(), + start_global_variables: reader.read_struct(GlobalVariables::deserialize), + end_global_variables: reader.read_struct(GlobalVariables::deserialize), + out_hash: reader.read(), + fees: reader.read_struct_array(FeeRecipient::deserialize, [FeeRecipient::empty(); 32]), + vk_tree_root: reader.read(), + prover_id: reader.read(), + }; + + reader.finish(); + item + } +} + +#[test] +fn serialization_of_empty() { + let item = BlockRootOrBlockMergePublicInputs::empty(); + let serialized = item.serialize(); + let deserialized = BlockRootOrBlockMergePublicInputs::deserialize(serialized); + 
assert(item.eq(deserialized)); +} diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/mod.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/mod.nr index fe5d8000c098..b57aeaa5fa0d 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/mod.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/mod.nr @@ -1,3 +1,5 @@ mod constant_rollup_data; mod base_or_merge_rollup_public_inputs; +mod block_root_or_block_merge_public_inputs; mod previous_rollup_data; +mod previous_rollup_block_data; diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/previous_rollup_block_data.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/previous_rollup_block_data.nr new file mode 100644 index 000000000000..da846f9d34ef --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/previous_rollup_block_data.nr @@ -0,0 +1,52 @@ +use crate::abis::block_root_or_block_merge_public_inputs::BlockRootOrBlockMergePublicInputs; +use dep::types::{ + constants::VK_TREE_HEIGHT, + recursion::{proof::NestedRecursiveProof, verification_key::VerificationKey, traits::Verifiable}, + traits::Empty, merkle_tree::MembershipWitness, merkle_tree::membership::assert_check_membership, + utils::arrays::find_index_hint +}; + +struct PreviousRollupBlockData { + block_root_or_block_merge_public_inputs: BlockRootOrBlockMergePublicInputs, + proof: NestedRecursiveProof, + vk: VerificationKey, + vk_witness: MembershipWitness, +} + +impl Verifiable for PreviousRollupBlockData { + fn verify(self) { + let inputs = BlockRootOrBlockMergePublicInputs::serialize(self.block_root_or_block_merge_public_inputs); + std::verify_proof( + self.vk.key, + self.proof.fields, + inputs, + self.vk.hash + ); + } +} + +impl Empty for PreviousRollupBlockData { + fn empty() -> Self { + PreviousRollupBlockData { + block_root_or_block_merge_public_inputs: BlockRootOrBlockMergePublicInputs::empty(), + 
proof: NestedRecursiveProof::empty(), + vk: VerificationKey::empty(), + vk_witness: MembershipWitness::empty(), + } + } +} + +impl PreviousRollupBlockData { + fn validate_in_vk_tree(self, allowed_indices: [u32; N]) { + let leaf_index = self.vk_witness.leaf_index as u32; + let index_hint = find_index_hint(allowed_indices, |index: u32| index == leaf_index); + assert_eq(allowed_indices[index_hint], leaf_index, "Invalid vk index"); + + assert_check_membership( + self.vk.hash, + self.vk_witness.leaf_index, + self.vk_witness.sibling_path, + self.block_root_or_block_merge_public_inputs.vk_tree_root + ); + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/block_merge/block_merge_rollup_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/block_merge/block_merge_rollup_inputs.nr new file mode 100644 index 000000000000..45dc272dd8f5 --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/block_merge/block_merge_rollup_inputs.nr @@ -0,0 +1,180 @@ +use dep::types::{traits::Empty, constants::{BLOCK_ROOT_ROLLUP_INDEX, BLOCK_MERGE_ROLLUP_INDEX}}; +use crate::abis::previous_rollup_block_data::PreviousRollupBlockData; +use crate::abis::block_root_or_block_merge_public_inputs::BlockRootOrBlockMergePublicInputs; +use crate::components; +// TODO(#7346): Currently unused! Will be used when batch rollup circuits are integrated. +global ALLOWED_PREVIOUS_CIRCUITS = [ + BLOCK_ROOT_ROLLUP_INDEX, + BLOCK_MERGE_ROLLUP_INDEX, +]; + +struct BlockMergeRollupInputs { + previous_rollup_data : [PreviousRollupBlockData; 2] +} + +impl Empty for BlockMergeRollupInputs { + fn empty() -> Self { + BlockMergeRollupInputs { + previous_rollup_data: [PreviousRollupBlockData::empty(); 2] + } + } +} + +impl BlockMergeRollupInputs { + pub fn block_merge_rollup_circuit(self) -> BlockRootOrBlockMergePublicInputs { + // TODO(Lasse): Check both previous rollup vks (in previous_rollup_block_data) against the permitted set of kernel vks. 
+ // we don't have a set of permitted kernel vks yet. + + // Verify the previous rollup proofs + if !dep::std::runtime::is_unconstrained() { + self.previous_rollup_data[0].verify(); + // TODO(#7410) we need the tube vk to reinstate this + // self.previous_rollup_data[0].validate_in_vk_tree(ALLOWED_PREVIOUS_CIRCUITS); + + self.previous_rollup_data[1].verify(); + // TODO(#7410) we need the tube vk to reinstate this + // self.previous_rollup_data[1].validate_in_vk_tree(ALLOWED_PREVIOUS_CIRCUITS); + } + + let left = self.previous_rollup_data[0].block_root_or_block_merge_public_inputs; + let right = self.previous_rollup_data[1].block_root_or_block_merge_public_inputs; + + // TODO(#7346): Constrain block rollup structure + // For full wonkiness (no wasted proving of padding blocks for any number of blocks, but more complex hash recalculation): + // components::assert_blocks_filled_from_left(left, right); + // For variable height but balanced tree (no wasted proving only for number of blocks = power of 2, simpler hash recalculation): + // assert(left.num_blocks == right.num_blocks) + // if (num_blocks == 1) { assert(vk_witness.index == BLOCK_ROOT_ROLLUP_INDEX) } else { assert(vk_witness.index == BLOCK_MERGE_ROLLUP_INDEX)} + // ^ Where instead of num_txs, use num_blocks = (end_global_variables.block_number - start_global_variables.block_number) + 1 + + components::assert_prev_block_rollups_follow_on_from_each_other(left, right); + + let out_hash = components::compute_blocks_out_hash(self.previous_rollup_data); + + let fees = components::accumulate_blocks_fees(left, right); + + BlockRootOrBlockMergePublicInputs { + previous_archive: left.previous_archive, + new_archive: right.new_archive, + previous_block_hash: left.previous_block_hash, + end_block_hash: right.end_block_hash, + start_global_variables: left.start_global_variables, + end_global_variables: right.end_global_variables, + out_hash, + fees, + vk_tree_root: left.vk_tree_root, + prover_id: left.prover_id // 
TODO(#7346): Temporarily added prover_id while we verify block-root proofs on L1 + } + } +} + +mod tests { + use crate::{ + block_merge::block_merge_rollup_inputs::BlockMergeRollupInputs, + tests::block_merge_rollup_inputs::default_block_merge_rollup_inputs + }; + use dep::types::{hash::accumulate_sha256, address::EthAddress}; + use dep::types::constants::{BLOCK_ROOT_ROLLUP_INDEX, BLOCK_MERGE_ROLLUP_INDEX}; + + #[test(should_fail_with="input blocks have different chain id")] + fn constants_different_chain_id_fails() { + let mut inputs = default_block_merge_rollup_inputs(); + inputs.previous_rollup_data[0].block_root_or_block_merge_public_inputs.end_global_variables.chain_id = 1; + let _output = inputs.block_merge_rollup_circuit(); + } + + #[test(should_fail_with="input blocks have different chain version")] + fn constants_different_ver_fails() { + let mut inputs = default_block_merge_rollup_inputs(); + inputs.previous_rollup_data[0].block_root_or_block_merge_public_inputs.end_global_variables.version = 1; + let _output = inputs.block_merge_rollup_circuit(); + } + + #[test(should_fail_with="input blocks have different archive tree snapshots")] + fn previous_rollups_dont_follow_archive() { + let mut inputs = default_block_merge_rollup_inputs(); + inputs.previous_rollup_data[0].block_root_or_block_merge_public_inputs.new_archive.root = 0; + inputs.previous_rollup_data[1].block_root_or_block_merge_public_inputs.previous_archive.root = 1; + let _output = inputs.block_merge_rollup_circuit(); + } + + #[test(should_fail_with="input block hashes do not follow on from each other")] + fn previous_rollups_dont_follow_block_hash() { + let mut inputs = default_block_merge_rollup_inputs(); + inputs.previous_rollup_data[0].block_root_or_block_merge_public_inputs.end_block_hash = 0; + inputs.previous_rollup_data[1].block_root_or_block_merge_public_inputs.previous_block_hash = 1; + let _output = inputs.block_merge_rollup_circuit(); + } + + #[test(should_fail_with="input block 
numbers do not follow on from each other")] + fn previous_rollups_dont_follow_block_number() { + let mut inputs = default_block_merge_rollup_inputs(); + inputs.previous_rollup_data[0].block_root_or_block_merge_public_inputs.end_global_variables.block_number = 2; + inputs.previous_rollup_data[1].block_root_or_block_merge_public_inputs.start_global_variables.block_number = 1; + let _output = inputs.block_merge_rollup_circuit(); + } + + #[test] + fn out_hash() { + let mut inputs = default_block_merge_rollup_inputs(); + let expected_hash = accumulate_sha256([1, 2]); + let outputs = inputs.block_merge_rollup_circuit(); + + assert_eq(outputs.out_hash, expected_hash); + } + + #[test] + fn block_fees_are_accumulated() { + let mut inputs = default_block_merge_rollup_inputs(); + let outputs = inputs.block_merge_rollup_circuit(); + // TODO(Miranda): Uncomment below when fees are accumulated: components.nr -> accumulate_blocks_fees() + + // // Default previous rollup inputs have the same fee recipient, so they should be accumulated into one + // let expected_fee_total = inputs.previous_rollup_data[0].block_root_or_block_merge_public_inputs.fees[0].value + // + inputs.previous_rollup_data[1].block_root_or_block_merge_public_inputs.fees[0].value; + // assert_eq(outputs.fees[0].value, expected_fee_total); + // assert(is_empty(outputs.fees[1])); + + // inputs = default_block_merge_rollup_inputs(); + // // Force each previous rollup to have different fee recipients + // inputs.previous_rollup_data[0].block_root_or_block_merge_public_inputs.fees[0].recipient = EthAddress::from_field(2); + // let outputs = inputs.block_merge_rollup_circuit(); + + assert_eq( + outputs.fees[0], inputs.previous_rollup_data[0].block_root_or_block_merge_public_inputs.fees[0] + ); + assert_eq( + outputs.fees[1], inputs.previous_rollup_data[1].block_root_or_block_merge_public_inputs.fees[0] + ); + } + + #[test] + fn valid_previous_circuit_block_root() { + let mut inputs = 
default_block_merge_rollup_inputs(); + let vk_tree = dep::types::tests::fixtures::vk_tree::get_vk_merkle_tree(); + inputs.previous_rollup_data[0].vk.hash = vk_tree.leaves[BLOCK_ROOT_ROLLUP_INDEX]; + inputs.previous_rollup_data[0].vk_witness.leaf_index = BLOCK_ROOT_ROLLUP_INDEX as Field; + inputs.previous_rollup_data[0].vk_witness.sibling_path = vk_tree.get_sibling_path(BLOCK_ROOT_ROLLUP_INDEX); + let _outputs = inputs.block_merge_rollup_circuit(); + } + + #[test] + fn valid_previous_circuit_block_merge() { + let mut inputs = default_block_merge_rollup_inputs(); + let vk_tree = dep::types::tests::fixtures::vk_tree::get_vk_merkle_tree(); + inputs.previous_rollup_data[0].vk.hash = vk_tree.leaves[BLOCK_MERGE_ROLLUP_INDEX]; + inputs.previous_rollup_data[0].vk_witness.leaf_index = BLOCK_MERGE_ROLLUP_INDEX as Field; + inputs.previous_rollup_data[0].vk_witness.sibling_path = vk_tree.get_sibling_path(BLOCK_MERGE_ROLLUP_INDEX); + let _outputs = inputs.block_merge_rollup_circuit(); + } + // TODO(#7410) we need the tube vk to reinstate this + // #[test(should_fail_with="Invalid vk index")] + // fn invalid_previous_circuit() { + // let mut inputs = default_block_merge_rollup_inputs(); + // let vk_tree = dep::types::tests::fixtures::vk_tree::get_vk_merkle_tree(); + // inputs.previous_rollup_data[0].vk.hash = vk_tree.leaves[ROOT_PARITY_INDEX]; + // inputs.previous_rollup_data[0].vk_witness.leaf_index = ROOT_PARITY_INDEX as Field; + // inputs.previous_rollup_data[0].vk_witness.sibling_path = vk_tree.get_sibling_path(ROOT_PARITY_INDEX); + // let _outputs = inputs.block_merge_rollup_circuit(); + // } +} diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/block_merge/mod.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/block_merge/mod.nr new file mode 100644 index 000000000000..a677abf9300e --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/block_merge/mod.nr @@ -0,0 +1,3 @@ +mod block_merge_rollup_inputs; +use 
block_merge_rollup_inputs::BlockMergeRollupInputs; +use crate::abis::block_root_or_block_merge_public_inputs::BlockRootOrBlockMergePublicInputs; diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/block_root/block_root_rollup_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/block_root/block_root_rollup_inputs.nr new file mode 100644 index 000000000000..f9cbed863134 --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/block_root/block_root_rollup_inputs.nr @@ -0,0 +1,149 @@ +use crate::{ + abis::{ + previous_rollup_data::PreviousRollupData, constant_rollup_data::ConstantRollupData, + block_root_or_block_merge_public_inputs::{BlockRootOrBlockMergePublicInputs, FeeRecipient} +}, + components +}; +use parity_lib::{root::root_rollup_parity_input::RootRollupParityInput, ParityPublicInputs}; +use types::{ + abis::{append_only_tree_snapshot::AppendOnlyTreeSnapshot, nullifier_leaf_preimage::NullifierLeafPreimage}, + address::EthAddress, + constants::{ + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, L1_TO_L2_MSG_SUBTREE_HEIGHT, + L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, ARCHIVE_HEIGHT, BASE_ROLLUP_INDEX, MERGE_ROLLUP_INDEX +}, + header::Header, content_commitment::ContentCommitment, + merkle_tree::{append_only_tree, calculate_subtree_root, calculate_empty_tree_root}, + state_reference::StateReference, traits::Empty +}; + +global ALLOWED_PREVIOUS_CIRCUITS = [ + BASE_ROLLUP_INDEX, + MERGE_ROLLUP_INDEX, +]; + +struct BlockRootRollupInputs { + // All below are shared between the base and merge rollups + previous_rollup_data: [PreviousRollupData; 2], + + l1_to_l2_roots: RootRollupParityInput, + + // inputs required to process l1 to l2 messages + l1_to_l2_messages: [Field; NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP], + l1_to_l2_message_subtree_sibling_path: [Field; L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH], + + start_l1_to_l2_message_tree_snapshot: AppendOnlyTreeSnapshot, + + // inputs required to add the block hash + 
start_archive_snapshot: AppendOnlyTreeSnapshot, + new_archive_sibling_path: [Field; ARCHIVE_HEIGHT], + // Added previous_block_hash to be passed through to the final root, where it will be either: + // - checked on L1 for first block in batch + // - checked against previous block_root.end_block_hash in a block_merge + previous_block_hash: Field, + // TODO(#7346): Temporarily added prover_id while we verify block-root proofs on L1 + prover_id: Field, +} + +impl BlockRootRollupInputs { + pub fn block_root_rollup_circuit(self) -> BlockRootOrBlockMergePublicInputs { + // Verify the previous rollup proofs + if !dep::std::runtime::is_unconstrained() { + self.previous_rollup_data[0].verify(); + // TODO(#7410) we need the tube vk to reinstate this + // self.previous_rollup_data[0].validate_in_vk_tree(ALLOWED_PREVIOUS_CIRCUITS); + + self.previous_rollup_data[1].verify(); + // TODO(#7410) we need the tube vk to reinstate this + // self.previous_rollup_data[1].validate_in_vk_tree(ALLOWED_PREVIOUS_CIRCUITS); + + // verify the root parity + self.l1_to_l2_roots.verify(); + // TODO(#7410) we need the tube vk to reinstate this + // self.l1_to_l2_roots.validate_in_vk_tree(); + } + + let left = self.previous_rollup_data[0].base_or_merge_rollup_public_inputs; + let right = self.previous_rollup_data[1].base_or_merge_rollup_public_inputs; + + components::assert_txs_filled_from_left(left, right); + components::assert_equal_constants(left, right); + components::assert_prev_rollups_follow_on_from_each_other(left, right); + + // Insert subtree into the l1 to l2 data tree + let empty_l1_to_l2_subtree_root = calculate_empty_tree_root(L1_TO_L2_MSG_SUBTREE_HEIGHT); + let new_l1_to_l2_message_tree_snapshot = append_only_tree::insert_subtree_to_snapshot_tree( + self.start_l1_to_l2_message_tree_snapshot, + self.l1_to_l2_message_subtree_sibling_path, + empty_l1_to_l2_subtree_root, + self.l1_to_l2_roots.public_inputs.converted_root, + // TODO(Kev): For now we can add a test that this fits inside of 
+ // a u8. + L1_TO_L2_MSG_SUBTREE_HEIGHT as u8 + ); + + let state = StateReference { l1_to_l2_message_tree: new_l1_to_l2_message_tree_snapshot, partial: right.end }; + + let content_commitment = ContentCommitment { + num_txs: (left.num_txs + right.num_txs) as Field, + txs_effects_hash: components::compute_txs_effects_hash(self.previous_rollup_data), + in_hash: self.l1_to_l2_roots.public_inputs.sha_root, + out_hash: components::compute_out_hash(self.previous_rollup_data) + }; + + let total_fees = components::accumulate_fees(left, right); + + let header = Header { + last_archive: left.constants.last_archive, + content_commitment, + state, + global_variables: left.constants.global_variables, + total_fees + }; + + // Build the block hash for this block by hashing the header, then insert it as a new leaf into the archive tree. + let block_hash = header.hash(); + + // Update the archive + let archive = append_only_tree::insert_subtree_to_snapshot_tree( + self.start_archive_snapshot, + self.new_archive_sibling_path, + 0, + block_hash, + 0 + ); + + let mut fee_arr = [FeeRecipient::empty(); 32]; + fee_arr[0] = FeeRecipient { recipient: left.constants.global_variables.coinbase, value: total_fees }; + + BlockRootOrBlockMergePublicInputs { + previous_archive: left.constants.last_archive, // archive before this block was added + new_archive: archive, // archive once this block was added + previous_block_hash: self.previous_block_hash, + end_block_hash: block_hash, // current newest block hash = this block hash + start_global_variables: left.constants.global_variables, // we have asserted that left.constants == right.constants => ... 
+ end_global_variables: left.constants.global_variables, // with a current block range of 1, we only have 1 set of constants + out_hash: content_commitment.out_hash, + fees: fee_arr, + vk_tree_root: left.constants.vk_tree_root, + prover_id: self.prover_id + } + } +} + +impl Empty for BlockRootRollupInputs { + fn empty() -> Self { + BlockRootRollupInputs { + previous_rollup_data: [PreviousRollupData::empty(); 2], + l1_to_l2_roots: RootRollupParityInput::empty(), + l1_to_l2_messages: [0; NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP], + l1_to_l2_message_subtree_sibling_path: [0; L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH], + start_l1_to_l2_message_tree_snapshot: AppendOnlyTreeSnapshot::zero(), + start_archive_snapshot: AppendOnlyTreeSnapshot::zero(), + new_archive_sibling_path: [0; ARCHIVE_HEIGHT], + previous_block_hash: 0, + prover_id: 0, + } + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/block_root/mod.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/block_root/mod.nr new file mode 100644 index 000000000000..350e051b2fa6 --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/block_root/mod.nr @@ -0,0 +1,34 @@ +mod block_root_rollup_inputs; + +// Re-exports +use block_root_rollup_inputs::BlockRootRollupInputs; +use crate::abis::block_root_or_block_merge_public_inputs::BlockRootOrBlockMergePublicInputs; + +mod tests { + use crate::tests::block_root_rollup_inputs::default_block_root_rollup_inputs; + use dep::types::hash::accumulate_sha256; + + #[test] + fn check_block_hashes_empty_blocks() { + let expected_out_hash = accumulate_sha256([1, 2]); + + let inputs = default_block_root_rollup_inputs(); + let outputs = inputs.block_root_rollup_circuit(); + + // check out hash + assert_eq(outputs.out_hash, expected_out_hash); + } + + #[test] + fn end_constants() { + let inputs = default_block_root_rollup_inputs(); + let outputs = inputs.block_root_rollup_circuit(); + + assert( + 
outputs.previous_archive.eq(inputs.previous_rollup_data[0].base_or_merge_rollup_public_inputs.constants.last_archive) + ); + assert( + outputs.start_global_variables.eq(inputs.previous_rollup_data[1].base_or_merge_rollup_public_inputs.constants.global_variables) + ); + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/components.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/components.nr index ce5f2b9aeebb..324d8fa40c30 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/components.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/components.nr @@ -1,5 +1,8 @@ -use crate::abis::base_or_merge_rollup_public_inputs::BaseOrMergeRollupPublicInputs; -use crate::abis::previous_rollup_data::PreviousRollupData; +use crate::abis::{ + base_or_merge_rollup_public_inputs::BaseOrMergeRollupPublicInputs, + block_root_or_block_merge_public_inputs::{BlockRootOrBlockMergePublicInputs, FeeRecipient} +}; +use crate::abis::{previous_rollup_data::PreviousRollupData, previous_rollup_block_data::PreviousRollupBlockData}; use dep::types::{ hash::{ accumulate_sha256, silo_unencrypted_log_hash, compute_tx_logs_hash, silo_encrypted_log_hash, @@ -11,7 +14,7 @@ use dep::types::{ MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, PROTOCOL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX }, - utils::{uint256::U256, arrays::array_length}, + utils::{uint256::U256, arrays::{array_length, array_merge}}, abis::{ append_only_tree_snapshot::AppendOnlyTreeSnapshot, accumulated_data::CombinedAccumulatedData, public_data_update_request::PublicDataUpdateRequest, log_hash::{LogHash, ScopedLogHash} @@ -22,10 +25,7 @@ use dep::types::{ * Asserts that the tree formed by rollup circuits is filled greedily from L to R * */ -pub fn assert_txs_filled_from_left( - left: BaseOrMergeRollupPublicInputs, - right: BaseOrMergeRollupPublicInputs -) { +pub fn assert_txs_filled_from_left(left: 
BaseOrMergeRollupPublicInputs, right: BaseOrMergeRollupPublicInputs) { // assert that the left rollup is either a base (1 tx) or a balanced tree (num txs = power of 2) if (left.rollup_type == 1) { let left_txs = left.num_txs; @@ -48,10 +48,7 @@ pub fn assert_txs_filled_from_left( * Asserts that the constants used in the left and right child are identical * */ -pub fn assert_equal_constants( - left: BaseOrMergeRollupPublicInputs, - right: BaseOrMergeRollupPublicInputs -) { +pub fn assert_equal_constants(left: BaseOrMergeRollupPublicInputs, right: BaseOrMergeRollupPublicInputs) { assert(left.constants.eq(right.constants), "input proofs have different constants"); } @@ -72,13 +69,50 @@ pub fn assert_prev_rollups_follow_on_from_each_other( ); } -pub fn accumulate_fees( - left: BaseOrMergeRollupPublicInputs, - right: BaseOrMergeRollupPublicInputs -) -> Field { +// TODO(Miranda): split out? +pub fn assert_prev_block_rollups_follow_on_from_each_other( + left: BlockRootOrBlockMergePublicInputs, + right: BlockRootOrBlockMergePublicInputs +) { + assert(left.vk_tree_root == right.vk_tree_root, "input blocks have different vk tree roots"); + assert( + left.new_archive.eq(right.previous_archive), "input blocks have different archive tree snapshots" + ); + assert( + left.end_block_hash.eq(right.previous_block_hash), "input block hashes do not follow on from each other" + ); + assert( + left.end_global_variables.chain_id == right.start_global_variables.chain_id, "input blocks have different chain id" + ); + assert( + left.end_global_variables.version == right.start_global_variables.version, "input blocks have different chain version" + ); + assert( + left.end_global_variables.block_number + 1 == right.start_global_variables.block_number, "input block numbers do not follow on from each other" + ); + assert( + left.end_global_variables.timestamp < right.start_global_variables.timestamp, "input block timestamps do not follow on from each other" + ); +} + +pub fn 
accumulate_fees(left: BaseOrMergeRollupPublicInputs, right: BaseOrMergeRollupPublicInputs) -> Field { left.accumulated_fees + right.accumulated_fees } +pub fn accumulate_blocks_fees( + left: BlockRootOrBlockMergePublicInputs, + right: BlockRootOrBlockMergePublicInputs +) -> [FeeRecipient; 32] { + let left_len = array_length(left.fees); + let right_len = array_length(right.fees); + assert(left_len + right_len <= 32, "too many fee payment structs accumulated in rollup"); + // TODO(Miranda): combine fees with same recipient depending on rollup structure + // Assuming that the final rollup tree (block root -> block merge -> root) has max 32 leaves (TODO: constrain in root), then + // in the worst case, we would be checking the left 16 values (left_len = 16) against the right 16 (right_len = 16). + // Either way, construct arr in unconstrained and make use of hints to point to merged fee array. + array_merge(left.fees, right.fees) +} + /** * @brief From two previous rollup data, compute a single out hash * @@ -93,6 +127,15 @@ pub fn compute_out_hash(previous_rollup_data: [PreviousRollupData; 2]) -> Field ] ) } +// TODO(Miranda): combine fns? 
+pub fn compute_blocks_out_hash(previous_rollup_data: [PreviousRollupBlockData; 2]) -> Field { + accumulate_sha256( + [ + previous_rollup_data[0].block_root_or_block_merge_public_inputs.out_hash, + previous_rollup_data[1].block_root_or_block_merge_public_inputs.out_hash + ] + ) +} pub fn compute_kernel_out_hash(l2_to_l1_msgs: [Field; MAX_L2_TO_L1_MSGS_PER_TX]) -> Field { let non_empty_items = array_length(l2_to_l1_msgs); diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/lib.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/lib.nr index 564847d2bac8..e79a948ebf6c 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/lib.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/lib.nr @@ -6,6 +6,12 @@ mod base; // Merge rollup mod merge; +// Block root rollup +mod block_root; + +// Block merge rollup +mod block_merge; + // Root rollup mod root; diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/merge/merge_rollup_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/merge/merge_rollup_inputs.nr index 792e62acd32f..6e4bb4336a90 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/merge/merge_rollup_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/merge/merge_rollup_inputs.nr @@ -81,14 +81,6 @@ mod tests { let _output = inputs.merge_rollup_circuit(); } - #[test(should_fail_with="input proofs have different constants")] - fn constants_different_fails() { - let mut inputs = default_merge_rollup_inputs(); - inputs.previous_rollup_data[0].base_or_merge_rollup_public_inputs.constants.global_variables.chain_id = 1; - inputs.previous_rollup_data[1].base_or_merge_rollup_public_inputs.constants.global_variables.chain_id = 0; - let _output = inputs.merge_rollup_circuit(); - } - #[test(should_fail_with="input proofs have different constants")] fn constants_different_chain_id_fails() { let mut inputs = default_merge_rollup_inputs(); 
diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/root/mod.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/root/mod.nr index f15ede83ac78..c5b228f55a07 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/root/mod.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/root/mod.nr @@ -5,68 +5,19 @@ mod root_rollup_public_inputs; use root_rollup_inputs::RootRollupInputs; use root_rollup_public_inputs::RootRollupPublicInputs; -// TODO: Move all the following code to different files -use dep::types::{constants::NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, utils::uint256::U256, hash::sha256_to_field}; - -// See `test_message_input_flattened_length` on keeping this in sync, -// why its here and how this constant was computed. -global NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP_NUM_BYTES: u32 = 512; - -// Computes the messages hash from the leaves array -// -// TODO(Miranda): remove? This appears to be unused -// Returns the hash truncated to one field element -fn compute_messages_hash(leaves: [Field; NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP]) -> Field { - // Slice variation - // let mut hash_input_flattened = []; - // for leaf in leaves { - // let input_as_bytes = leaf.to_be_bytes(32); - // for i in 0..32 { - // // TODO(Kev): should check the complexity of repeatedly pushing - // hash_input_flattened.push(input_as_bytes[i]); - // } - // } - - // Convert each field element into a byte array and append the bytes to `hash_input_flattened` - let mut hash_input_flattened = [0; NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP_NUM_BYTES]; - for offset in 0..NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP { - let input_as_bytes = leaves[offset].to_be_bytes(32); - for byte_index in 0..32 { - hash_input_flattened[offset * 32 + byte_index] = input_as_bytes[byte_index]; - } - } - - sha256_to_field(hash_input_flattened) -} - -#[test] -fn test_message_input_flattened_length() { - // This is here so that the global doesn't become outdated. 
- // - // The short term solution to remove this is to use slices, though - // those are a bit experimental right now, so TODO I'll add a test that the - // slice version of compute_messages_hash is the same as the array version. - // which uses the NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP_NUM_BYTES global. - assert(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * 32 == NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP_NUM_BYTES); -} - mod tests { - use crate::{ - root::{root_rollup_inputs::RootRollupInputs, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP_NUM_BYTES}, - tests::root_rollup_inputs::default_root_rollup_inputs - }; - use dep::types::utils::{uint256::U256, field::field_from_bytes_32_trunc}; + use crate::{tests::root_rollup_inputs::default_root_rollup_inputs}; use dep::types::hash::accumulate_sha256; #[test] fn check_block_hashes_empty_blocks() { - let expected_txs_effects_hash = accumulate_sha256([1, 2]); + let expected_out_hash = accumulate_sha256([1, 2]); let inputs = default_root_rollup_inputs(); let outputs = inputs.root_rollup_circuit(); - // check txs effects hash - assert_eq(outputs.header.content_commitment.txs_effects_hash, expected_txs_effects_hash); + // check out hash + assert_eq(outputs.out_hash, expected_out_hash); } #[test] @@ -75,15 +26,21 @@ mod tests { let outputs = inputs.root_rollup_circuit(); assert( - outputs.header.state.partial.note_hash_tree.eq(inputs.previous_rollup_data[1].base_or_merge_rollup_public_inputs.end.note_hash_tree) + outputs.previous_archive.eq(inputs.previous_rollup_data[0].block_root_or_block_merge_public_inputs.previous_archive) + ); + + assert( + outputs.end_archive.eq(inputs.previous_rollup_data[1].block_root_or_block_merge_public_inputs.new_archive) ); assert( - outputs.header.state.partial.nullifier_tree.eq(inputs.previous_rollup_data[1].base_or_merge_rollup_public_inputs.end.nullifier_tree) + outputs.end_timestamp.eq( + inputs.previous_rollup_data[1].block_root_or_block_merge_public_inputs.end_global_variables.timestamp + ) ); assert( - 
outputs.header.state.partial.public_data_tree.eq(inputs.previous_rollup_data[1].base_or_merge_rollup_public_inputs.end.public_data_tree) + outputs.end_block_hash.eq(inputs.previous_rollup_data[1].block_root_or_block_merge_public_inputs.end_block_hash) ); } } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/root/root_rollup_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/root/root_rollup_inputs.nr index 2331a0931959..f4d503a19187 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/root/root_rollup_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/root/root_rollup_inputs.nr @@ -1,43 +1,28 @@ use crate::{ - abis::{previous_rollup_data::PreviousRollupData, constant_rollup_data::ConstantRollupData}, - components, root::{root_rollup_public_inputs::RootRollupPublicInputs} + abis::previous_rollup_block_data::PreviousRollupBlockData, components, + root::root_rollup_public_inputs::RootRollupPublicInputs }; -use parity_lib::{root::root_rollup_parity_input::RootRollupParityInput, ParityPublicInputs}; -use types::{ - abis::{append_only_tree_snapshot::AppendOnlyTreeSnapshot, nullifier_leaf_preimage::NullifierLeafPreimage}, - constants::{ - NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, L1_TO_L2_MSG_SUBTREE_HEIGHT, - L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, ARCHIVE_HEIGHT, BASE_ROLLUP_INDEX, MERGE_ROLLUP_INDEX -}, - header::Header, content_commitment::ContentCommitment, - merkle_tree::{append_only_tree, calculate_subtree_root, calculate_empty_tree_root}, - state_reference::StateReference, traits::Empty -}; - +use types::{traits::Empty, constants::{BLOCK_ROOT_ROLLUP_INDEX, BLOCK_MERGE_ROLLUP_INDEX}}; +// TODO(#7346): Currently unused! Will be used when batch rollup circuits are integrated. 
global ALLOWED_PREVIOUS_CIRCUITS = [ - BASE_ROLLUP_INDEX, - MERGE_ROLLUP_INDEX, + BLOCK_ROOT_ROLLUP_INDEX, + BLOCK_MERGE_ROLLUP_INDEX, ]; struct RootRollupInputs { - // All below are shared between the base and merge rollups - previous_rollup_data : [PreviousRollupData; 2], - - l1_to_l2_roots: RootRollupParityInput, - - // inputs required to process l1 to l2 messages - l1_to_l2_messages : [Field; NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP], - l1_to_l2_message_subtree_sibling_path : [Field; L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH], - - start_l1_to_l2_message_tree_snapshot : AppendOnlyTreeSnapshot, - - // inputs required to add the block hash - start_archive_snapshot : AppendOnlyTreeSnapshot, - new_archive_sibling_path : [Field; ARCHIVE_HEIGHT], - + previous_rollup_data: [PreviousRollupBlockData; 2], prover_id: Field, } +impl Empty for RootRollupInputs { + fn empty() -> Self { + RootRollupInputs { + previous_rollup_data: [PreviousRollupBlockData::empty(); 2], + prover_id: 0, + } + } +} + impl RootRollupInputs { pub fn root_rollup_circuit(self) -> RootRollupPublicInputs { // Verify the previous rollup proofs @@ -49,80 +34,36 @@ impl RootRollupInputs { self.previous_rollup_data[1].verify(); // TODO(#7410) we need the tube vk to reinstate this // self.previous_rollup_data[1].validate_in_vk_tree(ALLOWED_PREVIOUS_CIRCUITS); - - // verify the root parity - self.l1_to_l2_roots.verify(); - // TODO(#7410) we need the tube vk to reinstate this - // self.l1_to_l2_roots.validate_in_vk_tree(); } - let left = self.previous_rollup_data[0].base_or_merge_rollup_public_inputs; - let right = self.previous_rollup_data[1].base_or_merge_rollup_public_inputs; - - components::assert_txs_filled_from_left(left, right); - components::assert_equal_constants(left, right); - components::assert_prev_rollups_follow_on_from_each_other(left, right); - - // Insert subtree into the l1 to l2 data tree - let empty_l1_to_l2_subtree_root = calculate_empty_tree_root(L1_TO_L2_MSG_SUBTREE_HEIGHT); - let 
new_l1_to_l2_message_tree_snapshot = append_only_tree::insert_subtree_to_snapshot_tree( - self.start_l1_to_l2_message_tree_snapshot, - self.l1_to_l2_message_subtree_sibling_path, - empty_l1_to_l2_subtree_root, - self.l1_to_l2_roots.public_inputs.converted_root, - // TODO(Kev): For now we can add a test that this fits inside of - // a u8. - L1_TO_L2_MSG_SUBTREE_HEIGHT as u8 - ); - - let state = StateReference { l1_to_l2_message_tree: new_l1_to_l2_message_tree_snapshot, partial: right.end }; - - let content_commitment = ContentCommitment { - num_txs: (left.num_txs + right.num_txs) as Field, - txs_effects_hash: components::compute_txs_effects_hash(self.previous_rollup_data), - in_hash: self.l1_to_l2_roots.public_inputs.sha_root, - out_hash: components::compute_out_hash(self.previous_rollup_data) - }; - - let total_fees = components::accumulate_fees(left, right); - - let vk_tree_root = left.constants.vk_tree_root; - - let header = Header { - last_archive: left.constants.last_archive, - content_commitment, - state, - global_variables: left.constants.global_variables, - total_fees - }; - - // Build the block hash for this by hashing the header and then insert the new leaf to archive tree. 
- let block_hash = header.hash(); - - // Update the archive - let archive = append_only_tree::insert_subtree_to_snapshot_tree( - self.start_archive_snapshot, - self.new_archive_sibling_path, - 0, - block_hash, - 0 - ); - - RootRollupPublicInputs { archive, header, vk_tree_root, prover_id: self.prover_id } - } -} - -impl Empty for RootRollupInputs { - fn empty() -> Self { - RootRollupInputs { - previous_rollup_data : [PreviousRollupData::empty(); 2], - l1_to_l2_roots: RootRollupParityInput::empty(), - l1_to_l2_messages : [0; NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP], - l1_to_l2_message_subtree_sibling_path : [0; L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH], - start_l1_to_l2_message_tree_snapshot : AppendOnlyTreeSnapshot::zero(), - start_archive_snapshot : AppendOnlyTreeSnapshot::zero(), - new_archive_sibling_path : [0; ARCHIVE_HEIGHT], - prover_id: 0 + let left = self.previous_rollup_data[0].block_root_or_block_merge_public_inputs; + let right = self.previous_rollup_data[1].block_root_or_block_merge_public_inputs; + + // TODO(#7346): Constrain block rollup structure + // For full wonkiness (no wasted proving of padding blocks for any number of blocks, but more complex hash recalculation): + // components::assert_blocks_filled_from_left(left, right); + // For variable height but balanced tree (no wasted proving only for number of blocks = power of 2, simpler hash recalculation): + // assert(left.num_blocks == right.num_blocks) + // if (num_blocks == 1) { assert(vk_witness.index == BLOCK_ROOT_ROLLUP_INDEX) } else { assert(vk_witness.index == BLOCK_MERGE_ROLLUP_INDEX)} + // ^ Where instead of num_txs, use num_blocks = (end_global_variables.block_number - start_global_variables.block_number) + 1 + + components::assert_prev_block_rollups_follow_on_from_each_other(left, right); + + let out_hash = components::compute_blocks_out_hash(self.previous_rollup_data); + + let fees = components::accumulate_blocks_fees(left, right); + + RootRollupPublicInputs { + previous_archive: 
left.previous_archive, + end_archive: right.new_archive, + previous_block_hash: left.previous_block_hash, + end_block_hash: right.end_block_hash, + end_timestamp: right.end_global_variables.timestamp, + end_block_number: right.end_global_variables.block_number, + out_hash, + fees, + vk_tree_root: left.vk_tree_root, + prover_id: self.prover_id } } } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/root/root_rollup_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/root/root_rollup_public_inputs.nr index 9a49f2c8d50d..15b239b8714b 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/root/root_rollup_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/root/root_rollup_public_inputs.nr @@ -1,15 +1,16 @@ -use dep::types::{abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot, header::Header}; - +use dep::types::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; +use crate::abis::block_root_or_block_merge_public_inputs::FeeRecipient; +// TODO(#7346): Currently unused! Will be used when batch rollup circuits are integrated. 
struct RootRollupPublicInputs { - // Snapshot of archive tree after this block/rollup been processed - archive: AppendOnlyTreeSnapshot, - - // Root of the protocol circuits VK tree + // Snapshot of archive tree before/after this rollup has been processed + previous_archive: AppendOnlyTreeSnapshot, + end_archive: AppendOnlyTreeSnapshot, + previous_block_hash: Field, + end_block_hash: Field, + end_timestamp: u64, + end_block_number: Field, + out_hash: Field, + fees: [FeeRecipient; 32], vk_tree_root: Field, - - // New block header - header: Header, - - // Identifier of the prover prover_id: Field, } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/block_merge_rollup_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/block_merge_rollup_inputs.nr new file mode 100644 index 000000000000..d76ccc5e8735 --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/block_merge_rollup_inputs.nr @@ -0,0 +1,10 @@ +use crate::block_merge::block_merge_rollup_inputs::BlockMergeRollupInputs; +use crate::tests::previous_rollup_block_data::default_previous_rollup_block_data; + +pub fn default_block_merge_rollup_inputs() -> BlockMergeRollupInputs { + let mut inputs = BlockMergeRollupInputs::empty(); + + inputs.previous_rollup_data = default_previous_rollup_block_data(); + + inputs +} diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/block_root_rollup_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/block_root_rollup_inputs.nr new file mode 100644 index 000000000000..e8c4d2ffbe7a --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/block_root_rollup_inputs.nr @@ -0,0 +1,56 @@ +use crate::block_root::block_root_rollup_inputs::BlockRootRollupInputs; +use dep::types::{ + abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot, + constants::{ + L1_TO_L2_MSG_TREE_HEIGHT, L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, 
L1_TO_L2_MSG_SUBTREE_HEIGHT, + ARCHIVE_HEIGHT +}, + tests::merkle_tree_utils::compute_zero_hashes +}; +use crate::tests::previous_rollup_data::default_previous_rollup_data; +use crate::tests::l1_to_l2_roots::default_root_rollup_parity_input; + +pub fn compute_l1_l2_empty_snapshot() -> (AppendOnlyTreeSnapshot, [Field; L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH]) { + let zero_hashes = compute_zero_hashes([0; L1_TO_L2_MSG_TREE_HEIGHT]); + let mut l1_to_l2_message_subtree_sibling_path = [0; L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH]; + + for i in 0..L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH { + let index = L1_TO_L2_MSG_SUBTREE_HEIGHT + i - 1; + l1_to_l2_message_subtree_sibling_path[i] = zero_hashes[index]; + } + + ( + AppendOnlyTreeSnapshot { root: zero_hashes[zero_hashes.len() - 1], next_available_leaf_index: 0 }, l1_to_l2_message_subtree_sibling_path + ) +} + +pub fn compute_archive_snapshot() -> (AppendOnlyTreeSnapshot, [Field; ARCHIVE_HEIGHT]) { + let zero_hashes = compute_zero_hashes([0; ARCHIVE_HEIGHT]); + let mut sibling_path = [0; ARCHIVE_HEIGHT]; + for i in 1..ARCHIVE_HEIGHT { + sibling_path[i] = zero_hashes[i-1]; + } + ( + AppendOnlyTreeSnapshot { root: zero_hashes[zero_hashes.len() - 1], next_available_leaf_index: 0 }, sibling_path + ) +} + +pub fn default_block_root_rollup_inputs() -> BlockRootRollupInputs { + let mut inputs = BlockRootRollupInputs::empty(); + + inputs.l1_to_l2_roots = default_root_rollup_parity_input(); + + let (l1_l2_empty_snapshot, l1_l2_empty_sibling_path) = compute_l1_l2_empty_snapshot(); + + inputs.l1_to_l2_message_subtree_sibling_path = l1_l2_empty_sibling_path; + inputs.start_l1_to_l2_message_tree_snapshot = l1_l2_empty_snapshot; + + let (blocks_snapshot, blocks_sibling_path) = compute_archive_snapshot(); + + inputs.start_archive_snapshot = blocks_snapshot; + inputs.new_archive_sibling_path = blocks_sibling_path; + + inputs.previous_rollup_data = default_previous_rollup_data(); + + inputs +} diff --git 
a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/merge_rollup_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/merge_rollup_inputs.nr index c19e174638c7..088c3158aab5 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/merge_rollup_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/merge_rollup_inputs.nr @@ -1,6 +1,4 @@ use crate::merge::merge_rollup_inputs::MergeRollupInputs; -use crate::abis::base_or_merge_rollup_public_inputs::BASE_ROLLUP_TYPE; -use dep::types::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; use crate::tests::previous_rollup_data::default_previous_rollup_data; pub fn default_merge_rollup_inputs() -> MergeRollupInputs { diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/mod.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/mod.nr index 6008fb5f449a..d1806e911c87 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/mod.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/mod.nr @@ -1,4 +1,7 @@ mod merge_rollup_inputs; +mod block_root_rollup_inputs; +mod block_merge_rollup_inputs; mod root_rollup_inputs; mod previous_rollup_data; +mod previous_rollup_block_data; mod l1_to_l2_roots; diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/previous_rollup_block_data.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/previous_rollup_block_data.nr new file mode 100644 index 000000000000..14d655f2835c --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/previous_rollup_block_data.nr @@ -0,0 +1,66 @@ +use crate::abis::previous_rollup_block_data::PreviousRollupBlockData; +use dep::types::constants::BLOCK_ROOT_ROLLUP_INDEX; +use dep::types::tests::fixtures; +use dep::types::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; +use dep::types::merkle_tree::MembershipWitness; + +pub 
fn default_previous_rollup_block_data() -> [PreviousRollupBlockData; 2] { + let mut previous_rollup_data = [PreviousRollupBlockData::empty(); 2]; + + let vk_index = BLOCK_ROOT_ROLLUP_INDEX; + let vk_tree = fixtures::vk_tree::get_vk_merkle_tree(); + let vk_hash = vk_tree.leaves[vk_index]; + let vk_path = vk_tree.get_sibling_path(vk_index); + let vk_tree_root = vk_tree.get_root(); + + previous_rollup_data[0].block_root_or_block_merge_public_inputs.vk_tree_root = vk_tree_root; + previous_rollup_data[1].block_root_or_block_merge_public_inputs.vk_tree_root = vk_tree_root; + + previous_rollup_data[0].vk.hash = vk_hash; + previous_rollup_data[1].vk.hash = vk_hash; + + previous_rollup_data[0].vk_witness = MembershipWitness { + leaf_index: vk_index as Field, + sibling_path: vk_path + }; + previous_rollup_data[1].vk_witness = MembershipWitness { + leaf_index: vk_index as Field, + sibling_path: vk_path + }; + + previous_rollup_data[0].block_root_or_block_merge_public_inputs.previous_archive = AppendOnlyTreeSnapshot { + root: 0, + next_available_leaf_index: 0 + }; + previous_rollup_data[0].block_root_or_block_merge_public_inputs.new_archive = AppendOnlyTreeSnapshot { + root: 1, + next_available_leaf_index: 1 + }; + previous_rollup_data[1].block_root_or_block_merge_public_inputs.previous_archive= AppendOnlyTreeSnapshot { + root: 1, + next_available_leaf_index: 1 + }; + previous_rollup_data[1].block_root_or_block_merge_public_inputs.new_archive = AppendOnlyTreeSnapshot { + root: 2, + next_available_leaf_index: 2 + }; + + previous_rollup_data[0].block_root_or_block_merge_public_inputs.previous_block_hash = 1; + previous_rollup_data[0].block_root_or_block_merge_public_inputs.end_block_hash = 2; + previous_rollup_data[1].block_root_or_block_merge_public_inputs.previous_block_hash = 2; + previous_rollup_data[1].block_root_or_block_merge_public_inputs.end_block_hash = 3; + + // previous_rollup_data is from one block_root circuit => encompasses a single block (block 1) + 
previous_rollup_data[1].block_root_or_block_merge_public_inputs.start_global_variables.block_number = 1; + previous_rollup_data[1].block_root_or_block_merge_public_inputs.end_global_variables.block_number = 1; + previous_rollup_data[1].block_root_or_block_merge_public_inputs.start_global_variables.timestamp = 2; + previous_rollup_data[1].block_root_or_block_merge_public_inputs.end_global_variables.timestamp = 2; + + previous_rollup_data[0].block_root_or_block_merge_public_inputs.out_hash = 1; + previous_rollup_data[1].block_root_or_block_merge_public_inputs.out_hash = 2; + + previous_rollup_data[0].block_root_or_block_merge_public_inputs.fees[0].value = 10; + previous_rollup_data[1].block_root_or_block_merge_public_inputs.fees[0].value = 15; + + previous_rollup_data +} diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/root_rollup_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/root_rollup_inputs.nr index cdc41a9a1f9c..6ec4db322780 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/root_rollup_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/tests/root_rollup_inputs.nr @@ -1,56 +1,10 @@ -use crate::{root::{root_rollup_inputs::RootRollupInputs}}; -use dep::types::{ - abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot, - constants::{ - L1_TO_L2_MSG_TREE_HEIGHT, L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, L1_TO_L2_MSG_SUBTREE_HEIGHT, - ARCHIVE_HEIGHT -}, - tests::merkle_tree_utils::compute_zero_hashes -}; -use crate::tests::previous_rollup_data::default_previous_rollup_data; -use crate::tests::l1_to_l2_roots::default_root_rollup_parity_input; - -pub fn compute_l1_l2_empty_snapshot() -> (AppendOnlyTreeSnapshot, [Field; L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH]) { - let zero_hashes = compute_zero_hashes([0; L1_TO_L2_MSG_TREE_HEIGHT]); - let mut l1_to_l2_message_subtree_sibling_path = [0; L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH]; - - for i in 
0..L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH { - let index = L1_TO_L2_MSG_SUBTREE_HEIGHT + i - 1; - l1_to_l2_message_subtree_sibling_path[i] = zero_hashes[index]; - } - - ( - AppendOnlyTreeSnapshot { root: zero_hashes[zero_hashes.len() - 1], next_available_leaf_index: 0 }, l1_to_l2_message_subtree_sibling_path - ) -} - -pub fn compute_archive_snapshot() -> (AppendOnlyTreeSnapshot, [Field; ARCHIVE_HEIGHT]) { - let zero_hashes = compute_zero_hashes([0; ARCHIVE_HEIGHT]); - let mut sibling_path = [0; ARCHIVE_HEIGHT]; - for i in 1..ARCHIVE_HEIGHT { - sibling_path[i] = zero_hashes[i-1]; - } - ( - AppendOnlyTreeSnapshot { root: zero_hashes[zero_hashes.len() - 1], next_available_leaf_index: 0 }, sibling_path - ) -} +use crate::root::root_rollup_inputs::RootRollupInputs; +use crate::tests::previous_rollup_block_data::default_previous_rollup_block_data; pub fn default_root_rollup_inputs() -> RootRollupInputs { let mut inputs = RootRollupInputs::empty(); - inputs.l1_to_l2_roots = default_root_rollup_parity_input(); - - let (l1_l2_empty_snapshot, l1_l2_empty_sibling_path) = compute_l1_l2_empty_snapshot(); - - inputs.l1_to_l2_message_subtree_sibling_path = l1_l2_empty_sibling_path; - inputs.start_l1_to_l2_message_tree_snapshot = l1_l2_empty_snapshot; - - let (blocks_snapshot, blocks_sibling_path) = compute_archive_snapshot(); - - inputs.start_archive_snapshot = blocks_snapshot; - inputs.new_archive_sibling_path = blocks_sibling_path; - - inputs.previous_rollup_data = default_previous_rollup_data(); + inputs.previous_rollup_data = default_previous_rollup_block_data(); inputs } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index cf082ee91ff7..6a16635cd593 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -117,7 +117,9 @@ global BASE_PARITY_INDEX: u32 = 18; global 
ROOT_PARITY_INDEX: u32 = 19; global BASE_ROLLUP_INDEX: u32 = 20; global MERGE_ROLLUP_INDEX: u32 = 21; -global ROOT_ROLLUP_INDEX: u32 = 22; +global BLOCK_ROOT_ROLLUP_INDEX: u32 = 22; +global BLOCK_MERGE_ROLLUP_INDEX: u32 = 23; +global ROOT_ROLLUP_INDEX: u32 = 24; // MISC CONSTANTS global FUNCTION_SELECTOR_NUM_BYTES: Field = 4; @@ -259,6 +261,8 @@ global CONSTANT_ROLLUP_DATA_LENGTH = APPEND_ONLY_TREE_SNAPSHOT_LENGTH + 1 + GLOB // + 5 for rollup_type, height_in_block_tree, txs_effects_hash, out_hash, accumulated_fees global BASE_OR_MERGE_PUBLIC_INPUTS_LENGTH = CONSTANT_ROLLUP_DATA_LENGTH + PARTIAL_STATE_REFERENCE_LENGTH + PARTIAL_STATE_REFERENCE_LENGTH + 5; +// + 64 for 32 * FeeRecipient { recipient, value }, + 4 for previous_block_hash, end_block_hash, out_hash, vk_tree_root + 1 temporarily for prover_id +global BLOCK_ROOT_OR_BLOCK_MERGE_PUBLIC_INPUTS_LENGTH = 2 * APPEND_ONLY_TREE_SNAPSHOT_LENGTH + 2 * GLOBAL_VARIABLES_LENGTH + 69; global GET_NOTES_ORACLE_RETURN_LENGTH: u32 = 674; global NOTE_HASHES_NUM_BYTES_PER_BASE_ROLLUP: u32 = 32 * MAX_NOTE_HASHES_PER_TX; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/vk_tree.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/vk_tree.nr index 1069bfb9ec9f..cf875776cc82 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/vk_tree.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/vk_tree.nr @@ -4,7 +4,8 @@ use crate::constants::{ PRIVATE_KERNEL_RESET_SMALL_INDEX, PRIVATE_KERNEL_TAIL_INDEX, PRIVATE_KERNEL_TAIL_TO_PUBLIC_INDEX, EMPTY_NESTED_INDEX, PRIVATE_KERNEL_EMPTY_INDEX, PUBLIC_KERNEL_SETUP_INDEX, PUBLIC_KERNEL_APP_LOGIC_INDEX, PUBLIC_KERNEL_TEARDOWN_INDEX, PUBLIC_KERNEL_TAIL_INDEX, - BASE_PARITY_INDEX, ROOT_PARITY_INDEX, BASE_ROLLUP_INDEX, MERGE_ROLLUP_INDEX, ROOT_ROLLUP_INDEX, + BASE_PARITY_INDEX, ROOT_PARITY_INDEX, BASE_ROLLUP_INDEX, MERGE_ROLLUP_INDEX, + BLOCK_ROOT_ROLLUP_INDEX, BLOCK_MERGE_ROLLUP_INDEX, 
ROOT_ROLLUP_INDEX, PRIVATE_KERNEL_RESET_TINY_INDEX }; use crate::merkle_tree::merkle_tree::MerkleTree; @@ -39,7 +40,9 @@ pub fn get_vk_merkle_tree() -> MerkleTree { leaves[ROOT_PARITY_INDEX] = 19; leaves[BASE_ROLLUP_INDEX] = 20; leaves[MERGE_ROLLUP_INDEX] = 21; - leaves[ROOT_ROLLUP_INDEX] = 22; + leaves[BLOCK_ROOT_ROLLUP_INDEX] = 22; + leaves[BLOCK_MERGE_ROLLUP_INDEX] = 23; + leaves[ROOT_ROLLUP_INDEX] = 24; MerkleTree::new(leaves) } diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index 6c1984e094d0..ef8247d0429e 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -291,10 +291,11 @@ function makeProofVerifiedEvent(l1BlockNum: bigint, l2BlockNumber: bigint, prove function makeRollupTx(l2Block: L2Block) { const header = toHex(l2Block.header.toBuffer()); const archive = toHex(l2Block.archive.root.toBuffer()); + const blockHash = toHex(l2Block.header.hash().toBuffer()); const input = encodeFunctionData({ abi: RollupAbi, functionName: 'process', - args: [header, archive], + args: [header, archive, blockHash], }); return { input } as Transaction; } diff --git a/yarn-project/archiver/src/archiver/eth_log_handlers.ts b/yarn-project/archiver/src/archiver/eth_log_handlers.ts index 2d844409b4d0..a3643cd6491c 100644 --- a/yarn-project/archiver/src/archiver/eth_log_handlers.ts +++ b/yarn-project/archiver/src/archiver/eth_log_handlers.ts @@ -100,7 +100,7 @@ async function getBlockMetadataFromRollupTx( if (!(functionName === 'process' || functionName === 'publishAndProcess')) { throw new Error(`Unexpected method called ${functionName}`); } - const [headerHex, archiveRootHex] = args! as readonly [Hex, Hex]; + const [headerHex, archiveRootHex, _] = args! 
as readonly [Hex, Hex, Hex]; const header = Header.fromBuffer(Buffer.from(hexToBytes(headerHex))); @@ -133,16 +133,16 @@ async function getBlockBodiesFromAvailabilityOracleTx( txHash: `0x${string}`, ): Promise { const { input: data } = await publicClient.getTransaction({ hash: txHash }); - const DATA_INDEX = [3, 2, 0]; + const DATA_INDEX = [4, 3, 0]; // @note Use `forge inspect Rollup methodIdentifiers to get this, // If using `forge sig` you will get an INVALID value for the case with a struct. // [ - // "publishAndProcess(bytes calldata _header,bytes32 _archive,SignatureLib.Signature[] memory _signatures,bytes calldata _body)", - // "publishAndProcess(bytes calldata _header,bytes32 _archive,bytes calldata _body)", + // "publishAndProcess(bytes calldata _header,bytes32 _archive,bytes32 _blockHash,SignatureLib.Signature[] memory _signatures,bytes calldata _body)", + // "publishAndProcess(bytes calldata _header,bytes32 _archive,bytes32 _blockHash,bytes calldata _body)", // "publish(bytes calldata _body)" // ] - const SUPPORTED_SIGS = ['0xe4e90c26', '0xe86e3595', '0x7fd28346']; + const SUPPORTED_SIGS = ['0x64450c6c', '0xde36c478', '0x7fd28346']; const signature = slice(data, 0, 4); diff --git a/yarn-project/bb-prover/src/prover/bb_prover.ts b/yarn-project/bb-prover/src/prover/bb_prover.ts index af3c839ff2dd..dcd09631a13d 100644 --- a/yarn-project/bb-prover/src/prover/bb_prover.ts +++ b/yarn-project/bb-prover/src/prover/bb_prover.ts @@ -14,6 +14,9 @@ import { type BaseOrMergeRollupPublicInputs, type BaseParityInputs, type BaseRollupInputs, + type BlockMergeRollupInputs, + type BlockRootOrBlockMergePublicInputs, + type BlockRootRollupInputs, EmptyNestedCircuitInputs, EmptyNestedData, Fr, @@ -47,6 +50,10 @@ import { convertBaseParityOutputsFromWitnessMap, convertBaseRollupInputsToWitnessMap, convertBaseRollupOutputsFromWitnessMap, + convertBlockMergeRollupInputsToWitnessMap, + convertBlockMergeRollupOutputsFromWitnessMap, + convertBlockRootRollupInputsToWitnessMap, + 
convertBlockRootRollupOutputsFromWitnessMap, convertMergeRollupInputsToWitnessMap, convertMergeRollupOutputsFromWitnessMap, convertPrivateKernelEmptyInputsToWitnessMap, @@ -324,6 +331,55 @@ export class BBNativeRollupProver implements ServerCircuitProver { return makePublicInputsAndRecursiveProof(circuitOutput, proof, verificationKey); } + /** + * Simulates the block root rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. + */ + public async getBlockRootRollupProof( + input: BlockRootRollupInputs, + ): Promise> { + // TODO(#7346): When batch rollups are integrated, we probably want the below to be this.createRecursiveProof + // since we will no longer be verifying it directly on L1 + const { circuitOutput, proof } = await this.createProof( + input, + 'BlockRootRollupArtifact', + convertBlockRootRollupInputsToWitnessMap, + convertBlockRootRollupOutputsFromWitnessMap, + ); + + const recursiveProof = makeRecursiveProofFromBinary(proof, NESTED_RECURSIVE_PROOF_LENGTH); + + const verificationKey = await this.getVerificationKeyDataForCircuit('BlockRootRollupArtifact'); + + await this.verifyProof('BlockRootRollupArtifact', proof); + + return makePublicInputsAndRecursiveProof(circuitOutput, recursiveProof, verificationKey); + } + + /** + * Simulates the block merge rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. 
+ */ + public async getBlockMergeRollupProof( + input: BlockMergeRollupInputs, + ): Promise> { + const { circuitOutput, proof } = await this.createRecursiveProof( + input, + 'BlockMergeRollupArtifact', + NESTED_RECURSIVE_PROOF_LENGTH, + convertBlockMergeRollupInputsToWitnessMap, + convertBlockMergeRollupOutputsFromWitnessMap, + ); + + const verificationKey = await this.getVerificationKeyDataForCircuit('BlockMergeRollupArtifact'); + + await this.verifyProof('BlockMergeRollupArtifact', proof.binaryProof); + + return makePublicInputsAndRecursiveProof(circuitOutput, proof, verificationKey); + } + /** * Simulates the root rollup circuit from its inputs. * @param input - Inputs to the circuit. diff --git a/yarn-project/bb-prover/src/stats.ts b/yarn-project/bb-prover/src/stats.ts index ca0fa1ba9ca7..d96399f677e8 100644 --- a/yarn-project/bb-prover/src/stats.ts +++ b/yarn-project/bb-prover/src/stats.ts @@ -15,6 +15,10 @@ export function mapProtocolArtifactNameToCircuitName( return 'base-rollup'; case 'MergeRollupArtifact': return 'merge-rollup'; + case 'BlockRootRollupArtifact': + return 'block-root-rollup'; + case 'BlockMergeRollupArtifact': + return 'block-merge-rollup'; case 'RootRollupArtifact': return 'root-rollup'; case 'PublicKernelSetupArtifact': diff --git a/yarn-project/bb-prover/src/test/test_circuit_prover.ts b/yarn-project/bb-prover/src/test/test_circuit_prover.ts index f9c5560e5be4..ec8ed4ea3312 100644 --- a/yarn-project/bb-prover/src/test/test_circuit_prover.ts +++ b/yarn-project/bb-prover/src/test/test_circuit_prover.ts @@ -11,6 +11,9 @@ import { type BaseOrMergeRollupPublicInputs, type BaseParityInputs, type BaseRollupInputs, + type BlockMergeRollupInputs, + type BlockRootOrBlockMergePublicInputs, + type BlockRootRollupInputs, EmptyNestedData, type KernelCircuitPublicInputs, type MergeRollupInputs, @@ -42,6 +45,10 @@ import { SimulatedServerCircuitArtifacts, convertBaseParityInputsToWitnessMap, convertBaseParityOutputsFromWitnessMap, + 
convertBlockMergeRollupInputsToWitnessMap, + convertBlockMergeRollupOutputsFromWitnessMap, + convertBlockRootRollupInputsToWitnessMap, + convertBlockRootRollupOutputsFromWitnessMap, convertMergeRollupInputsToWitnessMap, convertMergeRollupOutputsFromWitnessMap, convertPrivateKernelEmptyInputsToWitnessMap, @@ -299,6 +306,76 @@ export class TestCircuitProver implements ServerCircuitProver { ); } + /** + * Simulates the block root rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. + */ + @trackSpan('TestCircuitProver.getBlockRootRollupProof') + public async getBlockRootRollupProof( + input: BlockRootRollupInputs, + ): Promise> { + const timer = new Timer(); + const witnessMap = convertBlockRootRollupInputsToWitnessMap(input); + + // use WASM here as it is faster for small circuits + const witness = await this.wasmSimulator.simulateCircuit( + witnessMap, + SimulatedServerCircuitArtifacts.BlockRootRollupArtifact, + ); + + const result = convertBlockRootRollupOutputsFromWitnessMap(witness); + + this.instrumentation.recordDuration('simulationDuration', 'block-root-rollup', timer); + emitCircuitSimulationStats( + 'block-root-rollup', + timer.ms(), + input.toBuffer().length, + result.toBuffer().length, + this.logger, + ); + return makePublicInputsAndRecursiveProof( + result, + makeEmptyRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH), + ProtocolCircuitVks['BlockRootRollupArtifact'], + ); + } + + /** + * Simulates the block merge rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. 
+ */ + @trackSpan('TestCircuitProver.getBlockMergeRollupProof') + public async getBlockMergeRollupProof( + input: BlockMergeRollupInputs, + ): Promise> { + const timer = new Timer(); + const witnessMap = convertBlockMergeRollupInputsToWitnessMap(input); + + // use WASM here as it is faster for small circuits + const witness = await this.wasmSimulator.simulateCircuit( + witnessMap, + SimulatedServerCircuitArtifacts.BlockMergeRollupArtifact, + ); + + const result = convertBlockMergeRollupOutputsFromWitnessMap(witness); + + this.instrumentation.recordDuration('simulationDuration', 'block-merge-rollup', timer); + emitCircuitSimulationStats( + 'block-merge-rollup', + timer.ms(), + input.toBuffer().length, + result.toBuffer().length, + this.logger, + ); + return makePublicInputsAndRecursiveProof( + result, + makeEmptyRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH), + ProtocolCircuitVks['BlockMergeRollupArtifact'], + ); + } + /** * Simulates the root rollup circuit from its inputs. * @param input - Inputs to the circuit. 
diff --git a/yarn-project/circuit-types/src/interfaces/proving-job.ts b/yarn-project/circuit-types/src/interfaces/proving-job.ts index 22cfacb5b56f..e496875bcf57 100644 --- a/yarn-project/circuit-types/src/interfaces/proving-job.ts +++ b/yarn-project/circuit-types/src/interfaces/proving-job.ts @@ -3,6 +3,9 @@ import { type BaseOrMergeRollupPublicInputs, type BaseParityInputs, type BaseRollupInputs, + type BlockMergeRollupInputs, + type BlockRootOrBlockMergePublicInputs, + type BlockRootRollupInputs, type KernelCircuitPublicInputs, type MergeRollupInputs, type NESTED_RECURSIVE_PROOF_LENGTH, @@ -66,6 +69,8 @@ export enum ProvingRequestType { BASE_ROLLUP, MERGE_ROLLUP, + BLOCK_ROOT_ROLLUP, + BLOCK_MERGE_ROLLUP, ROOT_ROLLUP, BASE_PARITY, @@ -105,6 +110,14 @@ export type ProvingRequest = type: ProvingRequestType.MERGE_ROLLUP; inputs: MergeRollupInputs; } + | { + type: ProvingRequestType.BLOCK_ROOT_ROLLUP; + inputs: BlockRootRollupInputs; + } + | { + type: ProvingRequestType.BLOCK_MERGE_ROLLUP; + inputs: BlockMergeRollupInputs; + } | { type: ProvingRequestType.ROOT_ROLLUP; inputs: RootRollupInputs; @@ -127,6 +140,8 @@ export type ProvingRequestPublicInputs = { [ProvingRequestType.BASE_ROLLUP]: PublicInputsAndRecursiveProof; [ProvingRequestType.MERGE_ROLLUP]: PublicInputsAndRecursiveProof; + [ProvingRequestType.BLOCK_ROOT_ROLLUP]: PublicInputsAndRecursiveProof; + [ProvingRequestType.BLOCK_MERGE_ROLLUP]: PublicInputsAndRecursiveProof; [ProvingRequestType.ROOT_ROLLUP]: PublicInputsAndRecursiveProof; [ProvingRequestType.BASE_PARITY]: RootParityInput; diff --git a/yarn-project/circuit-types/src/interfaces/server_circuit_prover.ts b/yarn-project/circuit-types/src/interfaces/server_circuit_prover.ts index 40e4e01f33c3..60fc09fb9219 100644 --- a/yarn-project/circuit-types/src/interfaces/server_circuit_prover.ts +++ b/yarn-project/circuit-types/src/interfaces/server_circuit_prover.ts @@ -11,6 +11,9 @@ import { type BaseOrMergeRollupPublicInputs, type BaseParityInputs, type 
BaseRollupInputs, + type BlockMergeRollupInputs, + type BlockRootOrBlockMergePublicInputs, + type BlockRootRollupInputs, type KernelCircuitPublicInputs, type MergeRollupInputs, type NESTED_RECURSIVE_PROOF_LENGTH, @@ -80,6 +83,26 @@ export interface ServerCircuitProver { epochNumber?: number, ): Promise>; + /** + * Creates a proof for the given input. + * @param input - Input to the circuit. + */ + getBlockRootRollupProof( + input: BlockRootRollupInputs, + signal?: AbortSignal, + epochNumber?: number, + ): Promise>; + + /** + * Creates a proof for the given input. + * @param input - Input to the circuit. + */ + getBlockMergeRollupProof( + input: BlockMergeRollupInputs, + signal?: AbortSignal, + epochNumber?: number, + ): Promise>; + /** * Creates a proof for the given input. * @param input - Input to the circuit. diff --git a/yarn-project/circuit-types/src/stats/stats.ts b/yarn-project/circuit-types/src/stats/stats.ts index af924d447503..d38c44765cbf 100644 --- a/yarn-project/circuit-types/src/stats/stats.ts +++ b/yarn-project/circuit-types/src/stats/stats.ts @@ -76,6 +76,8 @@ export type CircuitName = | 'root-parity' | 'base-rollup' | 'merge-rollup' + | 'block-root-rollup' + | 'block-merge-rollup' | 'root-rollup' | 'private-kernel-init' | 'private-kernel-inner' diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index 4943d1fd7643..83d65161537f 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -77,7 +77,9 @@ export const BASE_PARITY_INDEX = 18; export const ROOT_PARITY_INDEX = 19; export const BASE_ROLLUP_INDEX = 20; export const MERGE_ROLLUP_INDEX = 21; -export const ROOT_ROLLUP_INDEX = 22; +export const BLOCK_ROOT_ROLLUP_INDEX = 22; +export const BLOCK_MERGE_ROLLUP_INDEX = 23; +export const ROOT_ROLLUP_INDEX = 24; export const FUNCTION_SELECTOR_NUM_BYTES = 4; export const ARGS_HASH_CHUNK_LENGTH = 16; export const ARGS_HASH_CHUNK_COUNT = 16; @@ 
-181,6 +183,7 @@ export const PUBLIC_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 3629; export const KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 663; export const CONSTANT_ROLLUP_DATA_LENGTH = 12; export const BASE_OR_MERGE_PUBLIC_INPUTS_LENGTH = 29; +export const BLOCK_ROOT_OR_BLOCK_MERGE_PUBLIC_INPUTS_LENGTH = 91; export const GET_NOTES_ORACLE_RETURN_LENGTH = 674; export const NOTE_HASHES_NUM_BYTES_PER_BASE_ROLLUP = 2048; export const NULLIFIERS_NUM_BYTES_PER_BASE_ROLLUP = 2048; diff --git a/yarn-project/circuits.js/src/structs/index.ts b/yarn-project/circuits.js/src/structs/index.ts index 5b03befe9744..68b6261a0dc2 100644 --- a/yarn-project/circuits.js/src/structs/index.ts +++ b/yarn-project/circuits.js/src/structs/index.ts @@ -72,7 +72,11 @@ export * from './revert_code.js'; export * from './rollup/append_only_tree_snapshot.js'; export * from './rollup/base_or_merge_rollup_public_inputs.js'; export * from './rollup/base_rollup.js'; +export * from './rollup/block_merge_rollup.js'; +export * from './rollup/block_root_or_block_merge_public_inputs.js'; +export * from './rollup/block_root_rollup.js'; export * from './rollup/merge_rollup.js'; +export * from './rollup/previous_rollup_block_data.js'; export * from './rollup/previous_rollup_data.js'; export * from './rollup/root_rollup.js'; export * from './rollup/state_diff_hints.js'; diff --git a/yarn-project/circuits.js/src/structs/rollup/block_merge_rollup.test.ts b/yarn-project/circuits.js/src/structs/rollup/block_merge_rollup.test.ts new file mode 100644 index 000000000000..d8d26dd39d16 --- /dev/null +++ b/yarn-project/circuits.js/src/structs/rollup/block_merge_rollup.test.ts @@ -0,0 +1,18 @@ +import { makeBlockMergeRollupInputs } from '../../tests/factories.js'; +import { BlockMergeRollupInputs } from './block_merge_rollup.js'; + +describe('BlockMergeRollupInputs', () => { + it('serializes to buffer and deserializes it back', () => { + const expected = makeBlockMergeRollupInputs(); + const buffer = expected.toBuffer(); + 
const res = BlockMergeRollupInputs.fromBuffer(buffer); + expect(res).toEqual(expected); + }); + + it('serializes to hex string and deserializes it back', () => { + const expected = makeBlockMergeRollupInputs(); + const str = expected.toString(); + const res = BlockMergeRollupInputs.fromString(str); + expect(res).toEqual(expected); + }); +}); diff --git a/yarn-project/circuits.js/src/structs/rollup/block_merge_rollup.ts b/yarn-project/circuits.js/src/structs/rollup/block_merge_rollup.ts new file mode 100644 index 000000000000..5db57f8a55c5 --- /dev/null +++ b/yarn-project/circuits.js/src/structs/rollup/block_merge_rollup.ts @@ -0,0 +1,53 @@ +import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; + +import { PreviousRollupBlockData } from './previous_rollup_block_data.js'; + +/** + * Represents inputs of the block merge rollup circuit. + */ +export class BlockMergeRollupInputs { + constructor( + /** + * Previous rollup data from the 2 block merge or block root rollup circuits that preceded this merge rollup circuit. + */ + public previousRollupData: [PreviousRollupBlockData, PreviousRollupBlockData], + ) {} + + /** + * Serializes the inputs to a buffer. + * @returns The inputs serialized to a buffer. + */ + toBuffer() { + return serializeToBuffer(this.previousRollupData); + } + + /** + * Serializes the inputs to a hex string. + * @returns The instance serialized to a hex string. + */ + toString() { + return this.toBuffer().toString('hex'); + } + + /** + * Deserializes the inputs from a buffer. + * @param buffer - The buffer to deserialize from. + * @returns A new BlockMergeRollupInputs instance. + */ + static fromBuffer(buffer: Buffer | BufferReader) { + const reader = BufferReader.asReader(buffer); + return new BlockMergeRollupInputs([ + reader.readObject(PreviousRollupBlockData), + reader.readObject(PreviousRollupBlockData), + ]); + } + + /** + * Deserializes the inputs from a hex string. + * @param str - A hex string to deserialize from. 
+ * @returns A new BlockMergeRollupInputs instance. + */ + static fromString(str: string) { + return BlockMergeRollupInputs.fromBuffer(Buffer.from(str, 'hex')); + } +} diff --git a/yarn-project/circuits.js/src/structs/rollup/block_root_or_block_merge_public_inputs.ts b/yarn-project/circuits.js/src/structs/rollup/block_root_or_block_merge_public_inputs.ts new file mode 100644 index 000000000000..193acbff4852 --- /dev/null +++ b/yarn-project/circuits.js/src/structs/rollup/block_root_or_block_merge_public_inputs.ts @@ -0,0 +1,134 @@ +import { Fr } from '@aztec/foundation/fields'; +import { BufferReader, type Tuple, serializeToBuffer, serializeToFields } from '@aztec/foundation/serialize'; +import { type FieldsOf } from '@aztec/foundation/types'; + +import { GlobalVariables } from '../global_variables.js'; +import { EthAddress } from '../index.js'; +import { AppendOnlyTreeSnapshot } from './append_only_tree_snapshot.js'; + +/** + * Output of the block root and block merge rollup circuits. + */ +export class BlockRootOrBlockMergePublicInputs { + constructor( + /** + * Archive tree immediately before this block range. + */ + public previousArchive: AppendOnlyTreeSnapshot, + /** + * Archive tree after adding this block range. + */ + public newArchive: AppendOnlyTreeSnapshot, + /** + * Identifier of the previous block before the range. + */ + public previousBlockHash: Fr, + /** + * Identifier of the last block in the range. + */ + public endBlockHash: Fr, + /** + * Global variables for the first block in the range. + */ + public startGlobalVariables: GlobalVariables, + /** + * Global variables for the last block in the range. + */ + public endGlobalVariables: GlobalVariables, + /** + * SHA256 hash of outhash. Used to make public inputs constant-sized (to then be unpacked on-chain). + * Note: Truncated to 31 bytes to fit in Fr. + */ + public outHash: Fr, + /** + * The summed `transaction_fee`s and recipients of the constituent blocks. 
+ */ + public fees: Tuple, + /** + * Root of the verification key tree. + */ + public vkTreeRoot: Fr, + /** + * TODO(#7346): Temporarily added prover_id while we verify block-root proofs on L1 + */ + public proverId: Fr, + ) {} + + /** + * Deserializes from a buffer or reader. + * @param buffer - Buffer or reader to read from. + * @returns The deserialized public inputs. + */ + static fromBuffer(buffer: Buffer | BufferReader): BlockRootOrBlockMergePublicInputs { + const reader = BufferReader.asReader(buffer); + return new BlockRootOrBlockMergePublicInputs( + reader.readObject(AppendOnlyTreeSnapshot), + reader.readObject(AppendOnlyTreeSnapshot), + Fr.fromBuffer(reader), + Fr.fromBuffer(reader), + reader.readObject(GlobalVariables), + reader.readObject(GlobalVariables), + Fr.fromBuffer(reader), + reader.readArray(32, FeeRecipient), + Fr.fromBuffer(reader), + Fr.fromBuffer(reader), + ); + } + + /** + * Serialize this as a buffer. + * @returns The buffer. + */ + toBuffer() { + return serializeToBuffer( + this.previousArchive, + this.newArchive, + this.previousBlockHash, + this.endBlockHash, + this.startGlobalVariables, + this.endGlobalVariables, + this.outHash, + this.fees, + this.vkTreeRoot, + this.proverId, + ); + } + + /** + * Serialize this as a hex string. + * @returns - The hex string. + */ + toString() { + return this.toBuffer().toString('hex'); + } + + /** + * Deserializes from a hex string. + * @param str - A hex string to deserialize from. + * @returns A new BaseOrMergeRollupPublicInputs instance. 
+ */ + static fromString(str: string) { + return BlockRootOrBlockMergePublicInputs.fromBuffer(Buffer.from(str, 'hex')); + } +} + +export class FeeRecipient { + constructor(public recipient: EthAddress, public value: Fr) {} + + static fromBuffer(buffer: Buffer | BufferReader): FeeRecipient { + const reader = BufferReader.asReader(buffer); + return new FeeRecipient(reader.readObject(EthAddress), Fr.fromBuffer(reader)); + } + + toBuffer() { + return serializeToBuffer(this.recipient, this.value); + } + + static getFields(fields: FieldsOf) { + return [fields.recipient, fields.value] as const; + } + + toFields() { + return serializeToFields(...FeeRecipient.getFields(this)); + } +} diff --git a/yarn-project/circuits.js/src/structs/rollup/block_root_or_block_merge_public_inputs_test.ts b/yarn-project/circuits.js/src/structs/rollup/block_root_or_block_merge_public_inputs_test.ts new file mode 100644 index 000000000000..8f83f12dadc4 --- /dev/null +++ b/yarn-project/circuits.js/src/structs/rollup/block_root_or_block_merge_public_inputs_test.ts @@ -0,0 +1,18 @@ +import { makeBlockRootOrBlockMergeRollupPublicInputs } from '../../tests/factories.js'; +import { BlockRootOrBlockMergePublicInputs } from './block_root_or_block_merge_public_inputs.js'; + +describe('BlockRootOrBlockMergePublicInputs', () => { + it(`serializes to buffer and deserializes it back`, () => { + const expected = makeBlockRootOrBlockMergeRollupPublicInputs(); + const buffer = expected.toBuffer(); + const res = BlockRootOrBlockMergePublicInputs.fromBuffer(buffer); + expect(res).toEqual(expected); + }); + + it(`serializes to hex string and deserializes it back`, () => { + const expected = makeBlockRootOrBlockMergeRollupPublicInputs(); + const str = expected.toString(); + const res = BlockRootOrBlockMergePublicInputs.fromString(str); + expect(res).toEqual(expected); + }); +}); diff --git a/yarn-project/circuits.js/src/structs/rollup/block_root_rollup.test.ts 
b/yarn-project/circuits.js/src/structs/rollup/block_root_rollup.test.ts new file mode 100644 index 000000000000..b9276b0cc55b --- /dev/null +++ b/yarn-project/circuits.js/src/structs/rollup/block_root_rollup.test.ts @@ -0,0 +1,18 @@ +import { makeBlockRootRollupInputs } from '../../tests/factories.js'; +import { BlockRootRollupInputs } from './block_root_rollup.js'; + +describe('BlockRootRollupInputs', () => { + it(`serializes a BlockRootRollupInputs to buffer and deserializes it back`, () => { + const expected = makeBlockRootRollupInputs(); + const buffer = expected.toBuffer(); + const res = BlockRootRollupInputs.fromBuffer(buffer); + expect(res).toEqual(expected); + }); + + it(`serializes a BlockRootRollupInputs to hex string and deserializes it back`, () => { + const expected = makeBlockRootRollupInputs(); + const str = expected.toString(); + const res = BlockRootRollupInputs.fromString(str); + expect(res).toEqual(expected); + }); +}); diff --git a/yarn-project/circuits.js/src/structs/rollup/block_root_rollup.ts b/yarn-project/circuits.js/src/structs/rollup/block_root_rollup.ts new file mode 100644 index 000000000000..18de9e20c8d1 --- /dev/null +++ b/yarn-project/circuits.js/src/structs/rollup/block_root_rollup.ts @@ -0,0 +1,131 @@ +import { Fr } from '@aztec/foundation/fields'; +import { BufferReader, type Tuple, serializeToBuffer } from '@aztec/foundation/serialize'; +import { type FieldsOf } from '@aztec/foundation/types'; + +import { + ARCHIVE_HEIGHT, + L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, + NESTED_RECURSIVE_PROOF_LENGTH, + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, +} from '../../constants.gen.js'; +import { RootParityInput } from '../parity/root_parity_input.js'; +import { AppendOnlyTreeSnapshot } from './append_only_tree_snapshot.js'; +import { PreviousRollupData } from './previous_rollup_data.js'; + +/** + * Represents inputs of the block root rollup circuit. 
+ */ +export class BlockRootRollupInputs { + constructor( + /** + * The previous rollup data from 2 merge or base rollup circuits. + */ + public previousRollupData: [PreviousRollupData, PreviousRollupData], + /** + * The original and converted roots of the L1 to L2 messages subtrees. + */ + public l1ToL2Roots: RootParityInput, + /** + * New L1 to L2 messages. + */ + public newL1ToL2Messages: Tuple, + /** + * Sibling path of the new L1 to L2 message tree root. + */ + public newL1ToL2MessageTreeRootSiblingPath: Tuple, + /** + * Snapshot of the L1 to L2 message tree at the start of the rollup. + */ + public startL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot, + /** + * Snapshot of the historical block roots tree at the start of the rollup. + */ + public startArchiveSnapshot: AppendOnlyTreeSnapshot, + /** + * Sibling path of the new block tree root. + */ + public newArchiveSiblingPath: Tuple, + /** + * The hash of the block preceding this one. + * TODO(#7346): Integrate batch rollup circuits and inject below + */ + public previousBlockHash: Fr, + /** + * TODO(#7346): Temporarily added prover_id while we verify block-root proofs on L1 + */ + public proverId: Fr, + ) {} + + /** + * Serializes the inputs to a buffer. + * @returns - The inputs serialized to a buffer. + */ + toBuffer() { + return serializeToBuffer(...BlockRootRollupInputs.getFields(this)); + } + + /** + * Serializes the inputs to a hex string. + * @returns The instance serialized to a hex string. + */ + toString() { + return this.toBuffer().toString('hex'); + } + + /** + * Creates a new instance from fields. + * @param fields - Fields to create the instance from. + * @returns A new RootRollupInputs instance. + */ + static from(fields: FieldsOf): BlockRootRollupInputs { + return new BlockRootRollupInputs(...BlockRootRollupInputs.getFields(fields)); + } + + /** + * Extracts fields from an instance. + * @param fields - Fields to create the instance from. + * @returns An array of fields. 
+ */ + static getFields(fields: FieldsOf) { + return [ + fields.previousRollupData, + fields.l1ToL2Roots, + fields.newL1ToL2Messages, + fields.newL1ToL2MessageTreeRootSiblingPath, + fields.startL1ToL2MessageTreeSnapshot, + fields.startArchiveSnapshot, + fields.newArchiveSiblingPath, + fields.previousBlockHash, + fields.proverId, + ] as const; + } + + /** + * Deserializes the inputs from a buffer. + * @param buffer - A buffer to deserialize from. + * @returns A new RootRollupInputs instance. + */ + static fromBuffer(buffer: Buffer | BufferReader): BlockRootRollupInputs { + const reader = BufferReader.asReader(buffer); + return new BlockRootRollupInputs( + [reader.readObject(PreviousRollupData), reader.readObject(PreviousRollupData)], + RootParityInput.fromBuffer(reader, NESTED_RECURSIVE_PROOF_LENGTH), + reader.readArray(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, Fr), + reader.readArray(L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, Fr), + reader.readObject(AppendOnlyTreeSnapshot), + reader.readObject(AppendOnlyTreeSnapshot), + reader.readArray(ARCHIVE_HEIGHT, Fr), + Fr.fromBuffer(reader), + Fr.fromBuffer(reader), + ); + } + + /** + * Deserializes the inputs from a hex string. + * @param str - A hex string to deserialize from. + * @returns A new RootRollupInputs instance. 
+ */ + static fromString(str: string) { + return BlockRootRollupInputs.fromBuffer(Buffer.from(str, 'hex')); + } +} diff --git a/yarn-project/circuits.js/src/structs/rollup/previous_rollup_block_data.test.ts b/yarn-project/circuits.js/src/structs/rollup/previous_rollup_block_data.test.ts new file mode 100644 index 000000000000..31df4ca801ee --- /dev/null +++ b/yarn-project/circuits.js/src/structs/rollup/previous_rollup_block_data.test.ts @@ -0,0 +1,11 @@ +import { makePreviousRollupBlockData } from '../../tests/factories.js'; +import { PreviousRollupBlockData } from './previous_rollup_block_data.js'; + +describe('PreviousRollupBlockData', () => { + it('serializes to buffer and deserializes it back', () => { + const expected = makePreviousRollupBlockData(); + const buffer = expected.toBuffer(); + const res = PreviousRollupBlockData.fromBuffer(buffer); + expect(res).toEqual(expected); + }); +}); diff --git a/yarn-project/circuits.js/src/structs/rollup/previous_rollup_block_data.ts b/yarn-project/circuits.js/src/structs/rollup/previous_rollup_block_data.ts new file mode 100644 index 000000000000..2d203aa8ffd8 --- /dev/null +++ b/yarn-project/circuits.js/src/structs/rollup/previous_rollup_block_data.ts @@ -0,0 +1,54 @@ +import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; + +import { NESTED_RECURSIVE_PROOF_LENGTH, VK_TREE_HEIGHT } from '../../constants.gen.js'; +import { MembershipWitness } from '../membership_witness.js'; +import { RecursiveProof } from '../recursive_proof.js'; +import { VerificationKeyAsFields } from '../verification_key.js'; +import { BlockRootOrBlockMergePublicInputs } from './block_root_or_block_merge_public_inputs.js'; + +/** + * Represents the data of a previous block merge or block root rollup circuit. + */ +export class PreviousRollupBlockData { + constructor( + /** + * Public inputs to the block merge or block root rollup circuit. 
+ */ + public blockRootOrBlockMergePublicInputs: BlockRootOrBlockMergePublicInputs, + /** + * The proof of the block merge or block root rollup circuit. + */ + public proof: RecursiveProof, + /** + * The verification key of the block merge or block root rollup circuit. + */ + public vk: VerificationKeyAsFields, + /** + * Sibling path of the rollup circuit's vk in a big tree of rollup circuit vks. + */ + public vkWitness: MembershipWitness, + ) {} + + /** + * Serializes previous rollup data to a buffer. + * @returns The buffer of the serialized previous rollup data. + */ + public toBuffer(): Buffer { + return serializeToBuffer(this.blockRootOrBlockMergePublicInputs, this.proof, this.vk, this.vkWitness); + } + + /** + * Deserializes previous rollup data from a buffer. + * @param buffer - A buffer to deserialize from. + * @returns A new PreviousRollupData instance. + */ + public static fromBuffer(buffer: Buffer | BufferReader): PreviousRollupBlockData { + const reader = BufferReader.asReader(buffer); + return new PreviousRollupBlockData( + reader.readObject(BlockRootOrBlockMergePublicInputs), + RecursiveProof.fromBuffer(reader, NESTED_RECURSIVE_PROOF_LENGTH), + reader.readObject(VerificationKeyAsFields), + MembershipWitness.fromBuffer(reader, VK_TREE_HEIGHT), + ); + } +} diff --git a/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts b/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts index df1f06970b2c..f33487e23783 100644 --- a/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts +++ b/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts @@ -2,16 +2,9 @@ import { Fr } from '@aztec/foundation/fields'; import { BufferReader, type Tuple, serializeToBuffer, serializeToFields } from '@aztec/foundation/serialize'; import { type FieldsOf } from '@aztec/foundation/types'; -import { - ARCHIVE_HEIGHT, - L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, - NESTED_RECURSIVE_PROOF_LENGTH, - NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, -} from 
'../../constants.gen.js'; -import { Header } from '../header.js'; -import { RootParityInput } from '../parity/root_parity_input.js'; import { AppendOnlyTreeSnapshot } from './append_only_tree_snapshot.js'; -import { PreviousRollupData } from './previous_rollup_data.js'; +import { FeeRecipient } from './block_root_or_block_merge_public_inputs.js'; +import { PreviousRollupBlockData } from './previous_rollup_block_data.js'; /** * Represents inputs of the root rollup circuit. @@ -21,33 +14,9 @@ export class RootRollupInputs { /** * The previous rollup data. * Note: Root rollup circuit is the latest circuit the chain of circuits and the previous rollup data is the data - * from 2 merge or base rollup circuits. + * from 2 block merge circuits. */ - public previousRollupData: [PreviousRollupData, PreviousRollupData], - /** - * The original and converted roots of the L1 to L2 messages subtrees. - */ - public l1ToL2Roots: RootParityInput, - /** - * New L1 to L2 messages. - */ - public newL1ToL2Messages: Tuple, - /** - * Sibling path of the new L1 to L2 message tree root. - */ - public newL1ToL2MessageTreeRootSiblingPath: Tuple, - /** - * Snapshot of the L1 to L2 message tree at the start of the rollup. - */ - public startL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot, - /** - * Snapshot of the historical block roots tree at the start of the rollup. - */ - public startArchiveSnapshot: AppendOnlyTreeSnapshot, - /** - * Sibling path of the new block tree root. - */ - public newArchiveSiblingPath: Tuple, + public previousRollupData: [PreviousRollupBlockData, PreviousRollupBlockData], /** Identifier of the prover for this root rollup. */ public proverId: Fr, ) {} @@ -83,16 +52,7 @@ export class RootRollupInputs { * @returns An array of fields. 
*/ static getFields(fields: FieldsOf) { - return [ - fields.previousRollupData, - fields.l1ToL2Roots, - fields.newL1ToL2Messages, - fields.newL1ToL2MessageTreeRootSiblingPath, - fields.startL1ToL2MessageTreeSnapshot, - fields.startArchiveSnapshot, - fields.newArchiveSiblingPath, - fields.proverId, - ] as const; + return [fields.previousRollupData, fields.proverId] as const; } /** @@ -103,14 +63,8 @@ export class RootRollupInputs { static fromBuffer(buffer: Buffer | BufferReader): RootRollupInputs { const reader = BufferReader.asReader(buffer); return new RootRollupInputs( - [reader.readObject(PreviousRollupData), reader.readObject(PreviousRollupData)], - RootParityInput.fromBuffer(reader, NESTED_RECURSIVE_PROOF_LENGTH), - reader.readArray(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, Fr), - reader.readArray(L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, Fr), - reader.readObject(AppendOnlyTreeSnapshot), - reader.readObject(AppendOnlyTreeSnapshot), - reader.readArray(ARCHIVE_HEIGHT, Fr), - reader.readObject(Fr), + [reader.readObject(PreviousRollupBlockData), reader.readObject(PreviousRollupBlockData)], + Fr.fromBuffer(reader), ); } @@ -131,18 +85,33 @@ export class RootRollupInputs { */ export class RootRollupPublicInputs { constructor( - /** Snapshot of archive tree after this block/rollup been processed */ - public archive: AppendOnlyTreeSnapshot, - /** The root for the protocol circuits vk tree */ + /** Snapshot of archive tree before/after this rollup been processed */ + public previousArchive: AppendOnlyTreeSnapshot, + public endArchive: AppendOnlyTreeSnapshot, + public previousBlockHash: Fr, + public endBlockHash: Fr, + // This is a u64 in nr, but GlobalVariables contains this as a u64 and is mapped to ts as a field, so I'm doing the same here + public endTimestamp: Fr, + public endBlockNumber: Fr, + public outHash: Fr, + public fees: Tuple, public vkTreeRoot: Fr, - /** A header of an L2 block. 
*/ - public header: Header, - /** Identifier of the prover who generated this proof. */ public proverId: Fr, ) {} static getFields(fields: FieldsOf) { - return [fields.archive, fields.vkTreeRoot, fields.header, fields.proverId] as const; + return [ + fields.previousArchive, + fields.endArchive, + fields.previousBlockHash, + fields.endBlockHash, + fields.endTimestamp, + fields.endBlockNumber, + fields.outHash, + fields.fees, + fields.vkTreeRoot, + fields.proverId, + ] as const; } toBuffer() { @@ -166,9 +135,15 @@ export class RootRollupPublicInputs { const reader = BufferReader.asReader(buffer); return new RootRollupPublicInputs( reader.readObject(AppendOnlyTreeSnapshot), - reader.readObject(Fr), - reader.readObject(Header), - reader.readObject(Fr), + reader.readObject(AppendOnlyTreeSnapshot), + Fr.fromBuffer(reader), + Fr.fromBuffer(reader), + Fr.fromBuffer(reader), + Fr.fromBuffer(reader), + Fr.fromBuffer(reader), + reader.readArray(32, FeeRecipient), + Fr.fromBuffer(reader), + Fr.fromBuffer(reader), ); } diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index 4f328cfa0d97..cd35c218f41d 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -148,6 +148,13 @@ import { Header } from '../structs/header.js'; import { PublicValidationRequests, ScopedL2ToL1Message, ScopedNoteHash } from '../structs/index.js'; import { KernelCircuitPublicInputs } from '../structs/kernel/kernel_circuit_public_inputs.js'; import { KernelData } from '../structs/kernel/kernel_data.js'; +import { BlockMergeRollupInputs } from '../structs/rollup/block_merge_rollup.js'; +import { + BlockRootOrBlockMergePublicInputs, + FeeRecipient, +} from '../structs/rollup/block_root_or_block_merge_public_inputs.js'; +import { BlockRootRollupInputs } from '../structs/rollup/block_root_rollup.js'; +import { PreviousRollupBlockData } from '../structs/rollup/previous_rollup_block_data.js'; 
import { RollupValidationRequests } from '../structs/rollup_validation_requests.js'; /** @@ -788,6 +795,10 @@ export function makeGasFees(seed = 1) { return new GasFees(fr(seed), fr(seed + 1)); } +export function makeFeeRecipient(seed = 1) { + return new FeeRecipient(EthAddress.fromField(fr(seed)), fr(seed + 1)); +} + /** * Makes constant base rollup data. * @param seed - The seed to use for generating the constant base rollup data. @@ -877,6 +888,30 @@ export function makeBaseOrMergeRollupPublicInputs( ); } +/** + * Makes arbitrary block merge or block root rollup circuit public inputs. + * @param seed - The seed to use for generating the block merge or block root rollup circuit public inputs. + * @param blockNumber - The block number to use for generating the block merge or block root rollup circuit public inputs. + * @returns A block merge or block root rollup circuit public inputs. + */ +export function makeBlockRootOrBlockMergeRollupPublicInputs( + seed = 0, + globalVariables: GlobalVariables | undefined = undefined, +): BlockRootOrBlockMergePublicInputs { + return new BlockRootOrBlockMergePublicInputs( + makeAppendOnlyTreeSnapshot(seed + 0x200), + makeAppendOnlyTreeSnapshot(seed + 0x300), + fr(seed + 0x400), + fr(seed + 0x500), + globalVariables ?? makeGlobalVariables(seed + 0x501), + globalVariables ?? makeGlobalVariables(seed + 0x502), + fr(seed + 0x600), + makeTuple(32, () => makeFeeRecipient(seed), 0x700), + fr(seed + 0x800), + fr(seed + 0x900), + ); +} + /** * Makes arbitrary previous rollup data. * @param seed - The seed to use for generating the previous rollup data. @@ -895,22 +930,54 @@ export function makePreviousRollupData( ); } +/** + * Makes arbitrary previous rollup block data. + * @param seed - The seed to use for generating the previous rollup block data. + * @param globalVariables - The global variables to use when generating the previous rollup block data. + * @returns A previous rollup block data. 
+ */ +export function makePreviousRollupBlockData( + seed = 0, + globalVariables: GlobalVariables | undefined = undefined, +): PreviousRollupBlockData { + return new PreviousRollupBlockData( + makeBlockRootOrBlockMergeRollupPublicInputs(seed, globalVariables), + makeRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH, seed + 0x50), + VerificationKeyAsFields.makeFake(), + makeMembershipWitness(VK_TREE_HEIGHT, seed + 0x120), + ); +} + /** * Makes root rollup inputs. * @param seed - The seed to use for generating the root rollup inputs. - * @param blockNumber - The block number to use for generating the root rollup inputs. + * @param globalVariables - The global variables to use. * @returns A root rollup inputs. */ export function makeRootRollupInputs(seed = 0, globalVariables?: GlobalVariables): RootRollupInputs { return new RootRollupInputs( + [makePreviousRollupBlockData(seed, globalVariables), makePreviousRollupBlockData(seed + 0x1000, globalVariables)], + fr(seed + 0x2000), + ); +} + +/** + * Makes block root rollup inputs. + * @param seed - The seed to use for generating the root rollup inputs. + * @param globalVariables - The global variables to use. + * @returns A block root rollup inputs. 
+ */ +export function makeBlockRootRollupInputs(seed = 0, globalVariables?: GlobalVariables): BlockRootRollupInputs { + return new BlockRootRollupInputs( [makePreviousRollupData(seed, globalVariables), makePreviousRollupData(seed + 0x1000, globalVariables)], makeRootParityInput(NESTED_RECURSIVE_PROOF_LENGTH, seed + 0x2000), makeTuple(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, fr, 0x2100), makeTuple(L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, fr, 0x2100), makeAppendOnlyTreeSnapshot(seed + 0x2200), makeAppendOnlyTreeSnapshot(seed + 0x2200), - makeTuple(ARCHIVE_HEIGHT, fr, 0x2400), - fr(0x2500), + makeTuple(ARCHIVE_HEIGHT, fr, 0x2300), + fr(seed + 0x2400), + fr(seed + 0x2500), ); } @@ -954,17 +1021,19 @@ export function makeRootParityInputs(seed = 0): RootParityInputs { * @param blockNumber - The block number to use in the global variables of a header. * @returns A root rollup public inputs. */ -export function makeRootRollupPublicInputs( - seed = 0, - blockNumber: number | undefined = undefined, - slotNumber: number | undefined = undefined, -): RootRollupPublicInputs { - return RootRollupPublicInputs.from({ - archive: makeAppendOnlyTreeSnapshot(seed + 0x100), - header: makeHeader(seed + 0x200, blockNumber, slotNumber), - vkTreeRoot: fr(seed + 0x300), - proverId: fr(seed + 0x400), - }); +export function makeRootRollupPublicInputs(seed = 0): RootRollupPublicInputs { + return new RootRollupPublicInputs( + makeAppendOnlyTreeSnapshot(seed + 0x200), + makeAppendOnlyTreeSnapshot(seed + 0x300), + fr(seed + 0x400), + fr(seed + 0x500), + fr(seed + 0x600), + fr(seed + 0x700), + fr(seed + 0x800), + makeTuple(32, () => makeFeeRecipient(seed), 0x900), + fr(seed + 0x100), + fr(seed + 0x200), + ); } /** @@ -1040,6 +1109,15 @@ export function makeMergeRollupInputs(seed = 0): MergeRollupInputs { return new MergeRollupInputs([makePreviousRollupData(seed), makePreviousRollupData(seed + 0x1000)]); } +/** + * Makes arbitrary block merge rollup inputs. 
+ * @param seed - The seed to use for generating the merge rollup inputs. + * @returns A block merge rollup inputs. + */ +export function makeBlockMergeRollupInputs(seed = 0): BlockMergeRollupInputs { + return new BlockMergeRollupInputs([makePreviousRollupBlockData(seed), makePreviousRollupBlockData(seed + 0x1000)]); +} + /** * Makes arbitrary public data tree leaves. * @param seed - The seed to use for generating the public data tree leaf. diff --git a/yarn-project/cli/src/cmds/l1/deploy_l1_verifier.ts b/yarn-project/cli/src/cmds/l1/deploy_l1_verifier.ts index 6c09d4d2b16f..783f5e5c4f0e 100644 --- a/yarn-project/cli/src/cmds/l1/deploy_l1_verifier.ts +++ b/yarn-project/cli/src/cmds/l1/deploy_l1_verifier.ts @@ -26,7 +26,10 @@ export async function deployUltraHonkVerifier( const { BBCircuitVerifier } = await import('@aztec/bb-prover'); const circuitVerifier = await BBCircuitVerifier.new({ bbBinaryPath, bbWorkingDirectory }); - const contractSrc = await circuitVerifier.generateSolidityContract('RootRollupArtifact', 'UltraHonkVerifier.sol'); + const contractSrc = await circuitVerifier.generateSolidityContract( + 'BlockRootRollupArtifact', + 'UltraHonkVerifier.sol', + ); log('Generated UltraHonkVerifier contract'); const input = { diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index c43e5ef32b63..4f6d31e5cfce 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -428,6 +428,7 @@ describe('L1Publisher integration', () => { args: [ `0x${block.header.toBuffer().toString('hex')}`, `0x${block.archive.root.toBuffer().toString('hex')}`, + `0x${block.header.hash().toBuffer().toString('hex')}`, `0x${block.body.toBuffer().toString('hex')}`, ], }); @@ -529,6 +530,7 @@ describe('L1Publisher integration', () => { args: [ `0x${block.header.toBuffer().toString('hex')}`, 
`0x${block.archive.root.toBuffer().toString('hex')}`, + `0x${block.header.hash().toBuffer().toString('hex')}`, `0x${block.body.toBuffer().toString('hex')}`, ], }) @@ -538,6 +540,7 @@ describe('L1Publisher integration', () => { args: [ `0x${block.header.toBuffer().toString('hex')}`, `0x${block.archive.root.toBuffer().toString('hex')}`, + `0x${block.header.hash().toBuffer().toString('hex')}`, ], }); expect(ethTx.input).toEqual(expectedData); diff --git a/yarn-project/end-to-end/src/composed/integration_proof_verification.test.ts b/yarn-project/end-to-end/src/composed/integration_proof_verification.test.ts index 522e295d58f3..48c7160d5cf5 100644 --- a/yarn-project/end-to-end/src/composed/integration_proof_verification.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_proof_verification.test.ts @@ -75,7 +75,7 @@ describe('proof_verification', () => { language: 'Solidity', sources: { 'UltraHonkVerifier.sol': { - content: await circuitVerifier.generateSolidityContract('RootRollupArtifact', 'UltraHonkVerifier.sol'), + content: await circuitVerifier.generateSolidityContract('BlockRootRollupArtifact', 'UltraHonkVerifier.sol'), }, }, settings: { @@ -121,6 +121,8 @@ describe('proof_verification', () => { ); block = L2Block.fromString(blockResult.block); + // TODO(#6624): Note that with honk proofs the below writes incorrect test data to file. + // The serialisation does not account for the prepended fields (circuit size, PI size, PI offset) in new Honk proofs, so the written data is shifted. 
proof = Proof.fromString(blockResult.proof); proverId = Fr.ZERO; aggregationObject = blockResult.aggregationObject.map((x: string) => Fr.fromString(x)); @@ -128,10 +130,11 @@ describe('proof_verification', () => { describe('bb', () => { it('verifies proof', async () => { - await expect(circuitVerifier.verifyProofForCircuit('RootRollupArtifact', proof)).resolves.toBeUndefined(); + await expect(circuitVerifier.verifyProofForCircuit('BlockRootRollupArtifact', proof)).resolves.toBeUndefined(); }); }); - + // TODO(#6624) & TODO(#7346): The below PIs do not correspond to BlockRoot/Root circuits. + // They will need to be updated to whichever circuit we are using when switching on this test. describe('HonkVerifier', () => { it('verifies full proof', async () => { const reader = BufferReader.asReader(proof.buffer); @@ -180,7 +183,8 @@ describe('proof_verification', () => { logger.info('Rollup only accepts valid proofs now'); await availabilityContract.write.publish([`0x${block.body.toBuffer().toString('hex')}`]); }); - + // TODO(#6624) & TODO(#7346): Rollup.submitProof has changed to submitBlockRootProof/submitRootProof + // The inputs below may change depending on which submit fn we are using when we reinstate this test. it('verifies proof', async () => { const args = [ `0x${block.header.toBuffer().toString('hex')}`, @@ -190,7 +194,7 @@ describe('proof_verification', () => { `0x${proof.withoutPublicInputs().toString('hex')}`, ] as const; - await expect(rollupContract.write.submitProof(args)).resolves.toBeDefined(); + await expect(rollupContract.write.submitBlockRootProof(args)).resolves.toBeDefined(); }); }); }); diff --git a/yarn-project/end-to-end/src/e2e_prover/full.test.ts b/yarn-project/end-to-end/src/e2e_prover/full.test.ts index 926e19d98ff7..f5881b11a1d9 100644 --- a/yarn-project/end-to-end/src/e2e_prover/full.test.ts +++ b/yarn-project/end-to-end/src/e2e_prover/full.test.ts @@ -79,7 +79,8 @@ describe('full_prover', () => { // fail the test. 
User asked for fixtures but we don't have any throw new Error('No block result found in test data'); } - + // TODO(#6624): Note that with honk proofs the below writes incorrect test data to file. + // The serialisation does not account for the prepended fields (circuit size, PI size, PI offset) in new Honk proofs, so the written data is shifted. writeTestData( 'yarn-project/end-to-end/src/fixtures/dumps/block_result.json', JSON.stringify({ diff --git a/yarn-project/end-to-end/src/fixtures/dumps/block_result.json b/yarn-project/end-to-end/src/fixtures/dumps/block_result.json index ac42be5f64a6..266cf49864e7 100644 --- a/yarn-project/end-to-end/src/fixtures/dumps/block_result.json +++ b/yarn-project/end-to-end/src/fixtures/dumps/block_result.json @@ -1,22 +1,22 @@ { - "block": "10e008bcf207d63bd25e82e72644aa35f6bfb1b8827bb9c7a7a3ce5288d6a22900000009000000000000000000000000000000000000000000000000000000000000000200e637eadc87f80d3e574218461b3a9919c965f3ea08d1c282de05c19c28ada100089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c0007638bb56b6dda2b64b8f76841114ac3a87a1820030e2e16772c4d294879c314f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000902b6118e9af4cf0cc12d53eea4d7095cde7944cff4505a918266f9fa51d6ce3aa000004801c17581727b72ab3a225b6a1e750391cf771e2e8ef6f39847862b6e6ae265496000005002ab90d519c7d55bb710ba7ea8a17ff66f07890b3b25957a6f588e82c240f8f83000005000000000000000000000000000000000000000000000000000000000000007a6900000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000009000000000000000000000000000000000000000000000000000000000000000f0000000000000000000000000000000000000000000000000000000066bb844a0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000
0000000000017d838b22e8e05b3bb1478f7158f8d7d86e8afebd8eb1228012f722663cbca9c257058e10000000a0000000200000000000000000000000000000000000000000000000000000000000bec47b00205f15d1a73672a4c8bf09360ad0182a56cded22fedf1e0d1e32d49b5fdbbda13041d013fce38a2244102529a0bcf81e16635536b0eaef4f4a221610b5df89524020bd8b3d97120eff1d091b4dde76e63996616d6ca0032dbb54d6d9b0732f1bd6f0bae0ba644d22b9c76fbd3d99b13442af7382d686d49622ea3b83fe90679d57700000000000000000000000000000000000000000000000000000000000000000408000000000000000000000000000000000000000000000000000000000000020400000000000000000000000000000000000000000000000000000000000000000000040c000004080000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a088d80f95d8a41d57e710d5e41c7520def4677a1fd79ef3ef4f69b391e49c1e2f507cddf70154642d6b191401d1e19d088fd211a401899df7eec5c49b4070f8f60105345d7d143d073221aa533d90a989b0173edc1cc3b972bdb39f84808b1960698e4f7524852fae8b3480494e031e55958a537c167b7c02c51334c7eeeec057f2d97805fe5f71eef8089f66ac2e65478387deee4218be94ad9fb05eb2312eea650721c25ae1f827fd0121ae4f74459751fd1914518b835c2acf54d2442df604ec7cdf852a4f06514274766748f087e3e1763a3d90eae579b1d575df22dbfe05edc979870cac6b3448a90b85fb6afe093ee07d38dc8a3b38828202b8ffb3214db1dcda737e3a532c9473a8659a834350f7c49df05eeb597ef4483a4cf231097eb94089565a372800e383bf52322f69d3050f30308afff240a264ffadaba3656047c188f0b64f72607d36edf5c1587bf7ca66a3ab52316aee168abbc4ee51b63601230e5b72a9f5cd280dff949a0fdc9e9037a4ab3a8ba10ad7f202ad85f4972ddb62dbe8b791b53e472628816f2aa325540b252976ca7cecd12ebaafde40066869c9eaee1332818fb4ecebf27f18954e3aca8175c0fd4c69d0af6ded472949000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000457f49b5a30ec53d2e0d18aab91fe8754da77e473da748f12b230ebf3d0968e29592a172aa9db76148355da21adc1729c0d8159ffb95b88fa0de91467f7b9fc42a6a433f485684596b0fde220a98004d69272b9a24899ea6e96570a0db5c4cc49
db99d1e99c812540218c419dc91efa4922a7f64680956b69488c296061934163fe309dda2d44432fb2dfba215ce3fd02ad0764b35eef08376c6a69cd1aa959d7955310944775d0e7343a3c7168bd644b3f34fe197e523717c48f7e581ab3055875325e43d12ce5852e2e6d2fcea0d3ab2c0c27193bcd6710bc411c40977fdc841443757506c225b2488da4ec24b4085bbba09093f08abe767630f77f957823941dfe2993bd279b28821d4003d3fa0b36bb41df35ef738e7643e9566975cd6c2dfa09ce433ffb3748886ddbe3840cee01f5efa1835c31a587dc62466d02066901fb44754e26a1195c766230c8087b83b909ed2f5efc0650f4dcd7abacff4d52c85c33380bb40cef56c71f77ba0fea8690da9faaf0e5222e9ed285c7db18a9c20d2f1595f03f06d2bdafe3083d578fcd997215c8d4ff97b140637254c3484cd81604b4342e5d3bd98ec7c406999c1577fc0105606116d24d5df98907586ab820000022800000224000002201e39f75b5116a9749fb322176027b1ad85a1f7e1e35e69bb6c31a3a364410f5d0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000086c793a719632385d608e782944590323a7ef4ce6a3e5cb931ffc0bc228474231ade4b511273e4bcb24b908d97ccc415a2c025ce26510473ebbe9a0c3343007654e0b62791d4a34862530e4095534cc0a23d25cd3b687aefeb14f9456e8653ad86fd417c743ebedba7a2cf48fd5c74538cbea0951e32c56527bc0b673b657de25d5687a49569f8f5b797a88d76ea35723771119aad9b33bad94a7ba9667e7dcf7013c4819a6000f73f7e1a2a40a2df12f9430c91db454a6931941f6b5a17c4436c6f0e0d106dc22be4891a265609e21c27742f187eaf36a3e364e49d781733e347337c80dea9988b12d06d37c14ed999979b6586c4b6224d81ef195d8df4ace9122c58d5fa756d0fabb7640de2c289e053557691019c73dbb83b8c19053249f7344f9b767ee9583d7275a0f4370003bc6657b07d5debcdcb317bfef4e668ccad714e7e703eeaba0c4dac442ea11e028489846c5817d43b59a30c005d8d7fde7236c46eb661c4118042976b51607bab139e5d312a39a6967928d493b29ccdc3904b8d1b526352a1f649ca7b1bf9d8958373b45e5e9b89d4d2fa9b7d221333e96b58de23c2a55ddc33c691d2fe2f2ff81f51cfb5faa2fa843a04cd682a9df73e40000000040000000000000000000000000000000000000000000000000000000000000000000bebf10200012ad84fd7d4a3f745e53ed5087cacf6f0509b08f404e19329a414abee51aaaa770002119f2715291daa2ec939db36562f9ad86
025e8646272bdd8d058de8d38e2c166000000000000000000000000000000000000000000000000000000000000138816bd76ba818bc49e3e364b1592b2a5501f8ca6c04229cde3fc4f58b7c19a1f84000000000000000000000000000000000000000000000000000000000000138800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000400000000000000080000000000000000", - "proof": "0000002a00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "aggregationObject": [ - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000000" - ] + "block": 
"1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e00000001000000000000000000000000000000000000000000000000000000000000000200d09e7feff5a1049661763ded52742f02aac5d9793b27a40d6b9c60a668bdf200747f2ee8836d7dd230b97572463dac0576259dedd98d86c56e2275d6d670d30007638bb56b6dda2b64b8f76841114ac3a87a1820030e2e16772c4d294879c32818d15e97da7dd64b32439cc63b7d03601ccadce81009ab9792a2f487795d30000000100b59baa35b9dc267744f0ccb4e3b0255c1fc512460d91130c6bc19fb2668568d0000008019a8c197c12bb33da6314c4ef4f8f6fcb9e25250c085df8672adf67c8f1e3dbc0000010023c08a6b1297210c5e24c76b9a936250a1ce2721576c26ea797c7ec35f9e46a90000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001dfe666201fc6aee3c5fdcd21deedfa71790c6c7719d2af6919c068ef9b9f4c30000000200000000", + "proof": 
"00003e84000001f40000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000000006b00000000000000000000000000000000000000000000000000000000000000011200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e00000000000000000000000000000000000000000000000000000000000000011dfe666201fc6aee3c5fdcd21deedfa71790c6c7719d2af6919c068ef9b9f4c3000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002ff40e125f72283eb833736e42285701b003d9e4270756ad3c5ba36ee0dbae760000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007638bb56b6dda2b64b8f76841114ac3a87a1820030e2e16772c4d294879c300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001e54491432d6c962973b71fcfd7b5597486f108bc66cd620099a65702fa0181b0000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000f3dac4a356c5fdd79000000000000000000000000000000000000000000000001f8b97ff8c78f3f9f00000000000000000000000000000000000000000000000cf72074e065bc22b30000000000000000000000000000000000000000000000000002dc10ffccda590000000000000000000000000000000000000000000000047bfb4dfb23cc889f00000000000000000000000000000000000000000000000871e52274633f4bf70000000000000000000000000000000000000000000000013c49830a0ce95ff20000000000000000000000000000000000000000000000000002c5ab7c0bb98e00000000000000000000000000000000000000000000000b0f322f57a86900ed000000000000000000000000000000000000000000000002e496ababf56e3cd6000000000000000000000000000000000000000000000005dd1141d5deb2050e000000000000000000000000000000000000000000000000000268dc87f9458f000000000000000000000000000000000000000000000003334a597cd9ec0a0e00000000000000000000000000000000000000000000000645a57625996ab518000000000000000000000000000000000000000000000006a2f7ffb16256c45b00000000000000000000000000000000000000000000000000027ca8c331291b000000000000000000000000000000f4ee11b0bde295507e62592c4338bc1f290000000000000000000000000000000000268abf37ebdc51e432b543b9cd13eb00000000000000000000000000000080fb57196db1edb119c4a43096ca53be4700000000000000000000000000000000000c10cb089cb171173b34c8ab3b952c00000000000000000000000000000064a8f95c06b2f690c389f6e9ffbac2f03d0000000000000000000000000000000000305b974421c33e120c6c35e2d49d7e0000000000000000000000000000006360b1b9dbd90a9b1ccb3cd3bc78cd9ecb00000000000000000000000000000000000cafe05c1184abbb1673bebfbdfd08000000000000000000000000000000e9a8914e09dba59c9d0469eac4258a756000000000000000000000000000000000000aab18264ff95058a2bd32aa92ef6f000000000000000000000000000000430eafce70b21dd200f81e952ae95ccea2000000000000000000000000000000000027f21d866b6071e7d5888a222b23f200000000000000000000000000000035e18690ea3538d27c3eb3e49ff771858a0000000000000000000000000000000000253559923d3ef097c24944960baaca0000000000000000000000000000000409efa67b85eec9d
b156ab6a7caac5f9b00000000000000000000000000000000001379a24c97a4e3f27a72888913923c000000000000000000000000000000968bb1d5b9def16c7ba73eb743ab2082a7000000000000000000000000000000000007ca785c1c0cfd82fbce0d58a4dd19000000000000000000000000000000edd674059047c461b4014800a82e531a550000000000000000000000000000000000227a11996c17a4514c44e17b3e5b92000000000000000000000000000000eac24cd380dd1e269e973ef0e61f1ceacd000000000000000000000000000000000005e248521b8b312e944537ae68ecdb000000000000000000000000000000d2d1bc3109deba532ba528c5732d0a5f640000000000000000000000000000000000015dc7ad1bc5e912a04cb7fa12ff760000000000000000000000000000007c9ccd204792110580933efcdc4e4429b500000000000000000000000000000000000dc9d4003e306af0e5bdf9671ed3b9000000000000000000000000000000a33a9a871c7276da857c0bddcfe44bbf8300000000000000000000000000000000002cb33c7ca066c3c2dcfa000582713e0000000000000000000000000000000283503468afcb144e05a54e030a9a263600000000000000000000000000000000001c444beb50f664ebc2590d48488f1a0000000000000000000000000000004ba5bd89da5d04580ea93ca6e0569750920000000000000000000000000000000000219de77d73911f7d4b40973b92260d20c3f91a2efb28325f9e9c7865a0785c99e3e78307f91517889b623e5248c1920fa05558b23677f758b1a93e1be0e0008e5000c571c05b79bb4693559db73e6f1a5162de6db9f031e6c0d24bbfe35bc0b6efafece270978e59125b4973a997e31c579d28bd16855fb8e7019f0624f0ff8d201cc7331881b9022884717556a7a910df1d8d4f02a36374e38d9ec8cf380ca5c577925a79479d298593db4d0e676a153bcc1799ae8895ffaac643470b7d08e3ed2b39e276ed5620899c30b53ddb180b60f927b3c132347fb728509be0db26cc7461185a6514559b8fa1b648a8fc4a2a8d01adcc80c703889d7085ac6ec52b081316f493ccf91bd5ec3b00c9ca9a392e014410ce4234c9ee9dc4046a89608910a607f85241cb239b4cf7f243af905f1e611e72fbbe893a8994d50241f507c1eef1e806a456b39fc5e823c07bcf897c2f20f1a0e0804767b56b7e14dc82678c3f03b3dd3f22528e6fd5cc4964dd1cc72d5c0d02b0448eadc2f8582fad135db51484d2082a67671b12fc9c19d173fb4119f0b799734b7223bc2056fb84a7798250cb27fa8111bc7f9fc4bf1439ed0b031ec89ed76f1837d4ec12c4b2ea6043510f34d5256445911d477e444338f83d3
7152974ab6c2851a4925bc5821ced7e10b18aca14c98c1c01ed759d09310273bd0cdb0e8928bcae871efd91d81146bce75ffdafd5dc9b636d121237a15aefe4ff2558b4e99f5aa79a83c160f622073577c99dfd85bc90a921468b5def1b5e6a5813a21e2cdd0fba4d28ab9778b18e2e479db80dea0f335ba0c99f48c6133a1cdd07461930985f5865c1f0b366270971ec3f16e116402951a2cd461365ab808a7d2490cec89fd1b8a8f7553efd519e09529e4eb27a5d5a2398a3010473a191ab09277bc2fbf183afef00e98419a93d6c0169633acff37be4ab5f2183e22c0ec2f214e363cbb7bdda19d2953138d091b902e6e067f4b11e07213175ff514ddfd6662f48f9edca6f052323b8751a5243993f497c10a9d2aad798d875e0caee6e2d0f11ef53cfa924be4c88950b2f8945a4c1b1687a76b3bda7c5f7c0c9b55e71d6db1eb47d29d356167a16eea82725dd97766a0f507d88c6e2e46d4b05b23d538b16163d2eafec2e3803ae06a8936145dc5fc5a9d5f5f449a4da772bcd8393f3839c2297b279aab987372a939f6cc3a8a17893b88ced49e650a7435a0207c0b3d3dd1fe89bc2464bbacf010fcf500b666fac9fac31edd1dc466200b9349c75277a4b0a4c0696b9053d260df2dfdfd28e1171a992d0a44ac4a79ee28358e43d712e04202b8e2915cb56aa73518d40b1ca8dbb8ec3db522e6c7cecb6a1e3820b3ec7f3111e26921335ba8970b1c83cea1d5c2f9a01629825b9f082fbe2692c30da51320a275bb5e10a3ba1c39fceebc54ea1773e6df342be5e10b6b92934d943a3891510d8124b20c13df7fc3febaee595d47eeb7d91a06c6938e354e0242b7f800b7515c391766e31ec5cebfae28caadd4f62f8cdaf9715476ebcefea6047ee8d5c1a08be8ce510de1b1ef721232bdfb66b5aa8a2d3936a72f61866182106b6c04fb126da8402d04044fc34320ea7dc25529adad17903aed9b6ba818fd1545d5d9a8e261fca9c2dd9f4e46ada8b6dab7597b138f496b23ac9130b5e0809ff7a7f81022650da504f0cf0e569f01fcb54f987b4077df2ab9ec7475cc2ee32f61766a7a01c71a792b2bbb401fb5bcc06c53bf885ed51eccb40c1d3dcb77738b12bfc1c7c2f001cdc50e33714ae43eec90a111e3a95c6a2f3e562be4ab584223da1eda2c71064e6eafa71b5b83e51ec68e7a2702e0a4b150b9bbc4439b7dbc2edda5c443205eba641ee7ca513f36d73e89e168eae1253038be675a202bf49f8e0d01c3ee027c4c71190b85111a4a993acd70d5d59a41c3745517ee9454884aad102c3a7460294a40fb2aedb6d7e393491c86cf95592f8eb4e341a9383048ac54b95a98fb91d72578a1f41fe8df57999f4fd5803a1d804e7817e8a313216e7fbb20ef4bfae1c7abaf6230db02
f7668ad099b6ac61eb58ddbb6ff081b11494d88e7d08e7b0e1d5151bd5f45ad09bd5d6bd55bc4877298f397c43756f385468b54490f6dc47510a7a4cb94179d0786421ff47903284154fd27b0c855f1afdcfaa89fd255b45008de625de179b91e5b38518c7edcc4ac3839c7c509af9e6612d48fbb6cfb50790d10dc8189a076946bc04e7c8eedd1d9c1c59c3f8cef6084560f127a6c887f4e18a35a3ab369418cfc8ac80d4d1d499be3ffec00c99b1228cebd5b22f3f9e65c2a77f0d8c3d31007f194cde62aac442a0a393cbbba694d7ae12f6d923f2f604426a885e6fa355c5175d4b07d75871c1d63cfd2d41e7fdd9fbf263b46c99bc1a6261200b5c29e0429f41890894a5fa3cb2434c68fabf6d6244b6f60ccb9545e3420fef06a696a636f93caefbcde2b199463402141bf92061a6d5ca1364bbbdbd6144ae133a44eac10081422a15b2566d5cd830da3a631056bc03e7f516bd15ce22c1186cbb384febe13f34df839737c0f1246573f0ccafd319e641e4c41853cb61cb2c6e5f5823a4680fe13e5e91146bb03eb22c7080aa5e1d14c4a7f615455b818e66c7bf833d780dc90aa5bf3c7e58f486c02986f1c551bcb6bbfd8a41f4971050e29cec4cf0ab4ece2b9214c7f2daed4b015cac222fca15feac05f41ebbb90230cd65386bd237067c3164b158e374784734b5d318403001f6836597dea514f2f966959a8b2cddfa0b6d5b3cd3193cb8961157ae0157943c4a8b0423f8c22a222742cc7ce54b1bc8eeb9e3d54396ceae8eeecb03f1cdc61585a4833743bbbf423861708b6672940f39c4d30689675e73238bdb0cd0b6f2eaab9d6daa5bb7cba1be67b411470e13b86af488cf6ef1f7a0748d52fdef40e51a45eb8358579e2001e6e499a9fcdbf253c4e14ae6c75c75c95097e8440edb9a794209d14bf9b096b01417c8a62766e3514850c040dcf1bd57198026b3a0230df831e90cf0a42d3fc29c51e9b0df6c9776190a624cec3c0a174a6dc72e6504d1e2084197ef41a90f71da03690381582c4805ff33db82a6dbc22b43468096b5a8b3135e5a3020f3c610af5c738bafeb2766c800c62db8d4c9e0439adc18f6837f5b1ab9a6f8f54325d0d89314b393a45cac216729563f8f491e637910021b1b6841df2630cf0664401123dcf9f67697dfe3f9dfc320ef75403c7366c86230df14bbc5949683b1f7e101332826dca63e48427b0b4d5d63a40ffb3aa5a218314e8b2192ef7463893ce99018e6f655bb29a298c74052d5cc0f98e6b7f600fa1d7cd3e2851a40e8fdd62b80efd4f22af41916d69c491126aff5b33f3bee2b5c3f7f036abfc5f584a6d39af279b2ba34a897c0732981fe04d212e62cae5cc4f5977b3e0b34068a555c0f4aa0856249aa4710a61d0f964c307828d2
0c33f9b63367cbbe45f6a3e43e1920505042f24355d1a5ae06abb18e1f57f1d83b5c5df10f3eaac777c9c32e79472cbb00de22b3ca4aa28fe8c8e7f445ad92b29198332bc5c83196bb52c8a28e12f92b9038610671c4f8070b088ec0c05cafc506bf658aa408df9f2d67e4654313a23c817771f0bf5fa4c65f3863106ca75926c6b6879542658d5d926f03031abf7614427b547e5c6871ae6ac39b1c7fb6c3a575a53696085a0e2483a7408012da3f4e122027ac76b7c469e3e99c3db7878fc0db08b107be62918ca5285e3d1ab7694700283c066c81606e966e4d491b0697d11602de7514cd41ef647c0731855ec8da5303bdd477bfb8f7072b0e1ed3355718bbe5d696f0e26969810aa60dde2091fff275394c7a0f9b50a8d9371e95181d164eff0da35ecaa736c5c6e74c9e5ef00e20fd2f16c7e0c0cb972a4cf6f31a38818c67f16b74bc20758d24ec212d02bde762d956c1a157c1dc40cd8c1ac7810cd1513a74daf430241f593036c6bb4a22f1f05f2a899e13342493d11092351fab93680ada41df6d9abc2c89dac9e52d9f169177992e868b700a1931eaa60239e9f3ecf4363f2f4e11c1573b101da9013de1f2c0b5306fe527c45cf4c570a8c0381f0d3ec37295f53a4d99e3cc17fe60fc6762181aca245535b2bff6172e615e9a6aaeb7b7ef072de6ac6bb16ee088d8717e7147ed8537f2102c55d9309f4ad82c0eb27434bebee4797bf573199107a9b6e1f0ba7a3d2f4f6f6a5d57b8f4bfb250eae52eadcb7dccf8034f35366da8d1d13a92f9e614c0f590afad82a14b4fbe124b66aa439d17fa90024fe93b58e43ef93ac0af67c378aa5af71700602f4f8b87eba98eff57c660a6122c7d2e40cd5efca992912c0d6b78d88de7920809aa66ba74f245b01504ce8cee4eeefa51faf7a4c4d2cb1f0efab958c0e007f3ea281ede0930b97daa5d3e6b85ec22e2e2384deefb60343bc64efc8b156f54ad97a6f6a1fa91057c332575e56c01913f39d285b71dc06aab65f896e2590e6806ce9332955e0edf6d485c2e51466e1742d17e437a63c2795504e7dd7056e6d9975d60a27fa496c008696486c2b2135a095afe70e725d17193efe90ae26943854b35cee9f9b656b8616c22a955e307439645e4ad2736b1baac9e704d35fd2c502ebe8a1297a525e8b50b0ed2225cec9648dee61e54ae01c43d9de2249cb108d9f30715e2f6d4c1a411e58cde060e538b413ec1505abe718df610004a08f7ec0157efd617086e271381a3413e73327037ba8531a8dbcae0a4b909e3c22453979cbbba7c9e8b338eb7963b6a2897559a35be81479e4265a26ea6198415b70fd4f12c48c72c90cc777dbaa8a0daaaeecd1705aa7f8de1e2e1fcbbbdd9c79310b7618299468be821f95c1de326161ce2
6439c05beb5a0d16909025e276e1de97d3643187c340acae1547e36162bed13320222a7690fcc95d31f05a6ced3f78a27bb4fc737d66ce18025f95b254e29b28ff371c33ea1bd8e132de3f054361005d06033267c285b3d9baaef4f929b1d1182fe0b5c18b50496d4143d760243baf23fdc07d7384e9ae5330dd37f71a87758d5412cf1a139da5c3e0929f3bf6cb32fbd157308fc5f2ba0b8af838e93c7e3e1fee780e8a91b5ba7161fee73232e207cf7f91c65a2ec78d494eab6bf0e3f477dc32b7c0b78c521106e02219a2c26e76d1a1c173ed0bbffc02063019c0741f268fbbfa4f160e7fec1c516300372af2e190ed7f4212af578ca73017382150fec38fe0b5c279e1fb5a30611751f2b2ceb7003e13c353335bdfd021037ac2e164f70b7369b831188a5d3301c9b81bade3b43ca3e32ab4d1e1c3df573fff48435c41afd9e182bcc1c725f4f2416b598d7f90c4d87d2d48e377235155a7f34f0ed9d5960273caefd4868bb600d4c13ea609f29f2b39969780d60cb4d737752b3d7828618d4204a1e1f252c5f2bae870edb31ce84bb1d1fc6be8ecab20a45c184c12d73ccd4d92b64e985f2bb0da0cb84306678bc0f2ec5861f17012024e0eeebe8ca9b71050e8e59ea270aea238716cb6ea62eff41f7b10ead872176959d6ecc6046e3e1bfe600ae368afaaa04261e59bcd2eced7fd1bcadc84ed2788a59e6189e356c5bf9d2872249671a9408616c8b96a79de09262c726bd0a85b2ab758fc30ac52c0807086ddebe44c5d324ddca837c33f01b7c4a190cb1f294f574d6a8a022a858aab49e23020162abaa2b3c74d5bfdf8e677205713ab3524400cab5a78d23d9d47a54d2cc13bee6d4d1002cadcb2c914ff3180d62619508ff811aca4904f4d5c7e34d8cd5427316622d0cfdba13fbb72359562838a0dfe1c07dc96d3f29a3cdcb978c100ac75337f7ed264a617d779ef92efaabf31f8d8ee083ef13951e13b7341a32ea5a206ba0e74807fbdcc9fb35a66425f2468df7e3462a97f90c21c6b6ccf03653dcc4a07a13e4188beb58d73fab9bde45f64462d5df158bc5486e5ffcb61267358818b0a1c531206a02999da5ae0e0cb13cdfcbe448e75e91a085756f91f11204d24b9ac26c9f05b0909517430ce3afc8c0959296f0a1a9b708f468a2503ace0ea90dbec7d2e61293679bc3ca66d5cc8133883cc39c89f1650472d2dbc0addeabac8be5736a0f2927c30423c73636944981a57bf36564e3c454f40a4aeaecbcde88a2045b0d7f22ae98c1431ebb5f8827f43f16e6dc1809df7d40b745507cf40db1ae36a1289c206d0b8015b7cf444af29f7e552aef13d8ee4d0dcbdd0b18b35247663e14548319587bce9e2f949b13c8ca7e7e6a7db53ab00b82e8a0144b89a128f7d513ca7
407319f5b0c2ab6cc53d3cf9e27ccdb7e98439506d9873e62c87989d7c525e5c62e1ebecaaef0968a8348a1afcaddeaa543ab271f868f4ff41ee72a782270dcb6095466996accc8f37d6b5054c8f0d3cd25af16c86cec489e5a97a2687f9f13d72f8ab24af842626811592ac14f00f47ae1d66e720e43050bea628a6b99a5e20b19e249b107819b5d02e4ac952fc11d716b971227bbf3bcdcddee7272a5c17d1517f3ef792dfed212cce5b4d30ae9d7bacf4432673c597e94a1011e6280c3fea201a709a353d1d7806ef92d17360bab66478ff180f0dfa3452b705a770e9c56b411bd16547623412a971554709dc5adb66ff0dc0913c1f173bd27f9f4807e72232edc4abf1c224085c921e5cec13064839780ef3e80ef272fed1e63a49b289fa30882621de391a717467195fcf23c2e7c7b043e6ee69d66b1c420f3416c33dedb251819bdcbef3b69a2f1a1dd4e6f9b638efb8b2a958de59ada940fe569919ce30b9138168402ea07cc57bc407820d261747f65c3a60c5d3c72e8dfb37eeb164429a574406a0e051276136f142695deed52557dc804ae74074585605e8cdfdc321cc161214c24b8930607b959c3b873f8887136b7a533609b0cd6e323637df3e719e28cd4075be0a5fe6afbbcb45103ea744a9b117fc79ff7c8e4ed2eb0f6ae5c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000755abab3f7b103f419606830d0d840388082435895411b90d4d8924e338d288225d20cb965acd1e966f30241e086c65dfb43f7a3b3e4a6264efe306397505692f3cae8e2b6e6c66d0ca6d9b1085f3529b78bd6b665110ca038427e6e9d5deaa1bbe84a25e455e3c83c39ca18892a482e47b82b7a89e9b5be7529a62dda6f91d0c7828de87520f74267502a1124d4128b9bf6937bdc4843714ec57484ae3faba0ec3703279721edd0682063125f10f2
2f57f1436b8c837bfad0d09501afb8b1401cc20f14c3fb3adfb07fd941b424725f3961a029488cf77733e63f3a521d47c2b4566e34ecccc6104ee261c33ead96a55f0ae11e1101dcba121d4c9e5eb2ab10034e2c91d4fe984aaefc2a6ca3613348a352b522ed427b004f5cf0d7bac0b481be7bba8fb303fc198a0a0d128592a6383d06df04cab44900c424b19d6e35bbd1d493542c799cd103e957f7396192d236a3c57bb662f3ae75bfc452022f6522f03c29c5a7aaeb266653eea78f42e09900e9e4938dc3a631de28eee2e9488a2762a77d7b4c7dee04743e2fcd5859820a14c81176e9cdc6309f3362ff1ed57777b0b52e5cf478b27a59deabfad2162bd98d9306dab3b22e59486089afc983ed8862b9a7d10611b8f64f05a7ee0de0b47d4bff6f7e19e7f3cbff886461fec1141552263408d7b25e069229b0f58c7300a2f9737c94a9393736c8c11e4c11341890a1d5019f4fdb6e14253909bc1c68466f133fdc96c3bd64700156e6b4568f37ffd1f05340dc04a7ebe8ad4e96c0d0c240ee10abd9b57c5cefc1258a345c56855df0340e35b136571723ca5c477df6c6ce878b90aec1d72dbe229c5be0e3c6fdec5004286e2554287cc7093855ecd4edc7f28fc3de48e040fc4793716e35dbd7d9a26334f4bfa504942cabff7275317ec9dd5c117542ef13c8f0761bef5232a0f42062539b418b01be15071dd250ebface7f45f09cefaea981d594e1fb651131a9c00e29a561cc35341d9bcf8479f03bc621d77840199eec0cd6322278c7c010f6c0a0c3cbff7f1595854424eb82ca3ed94fce1e88d256cd64a071420fe16a6c05e257d6fc9fbbbcf49f89dd2e55579b233386ca18d09a8fc47f3b2b768e60c9d812a664203c1d1e818c9441fbe811ed45d87846a322cb75e41a93cf0dc9f04b8521bb3763468fc29c6203915e8dcb535206d4bef5889a66ad062a0afb84cbc6cac0195bda43c78ef436e0f701f203a2440604c82babfeb07632abfd03011e82011282d27f4bf2b82a075d7580f601099bfeead7156eec207b3126ef0e955f03dca17fa89336ca99efc58b8a64a0e377e369e56db279c031f27d758a6c3b407edc90f38ac48b13d1f63252ff228e3cc9545a9c7bc4b38123dcf50d3f41b148ba10023a1e58f26d71dab1261af500d59f5d189dd43cdaeb9dc266882d8625bf669f911694cb35ab7aa98c1d1b716d150a4eb113f034a4ce7da5a09196ad3ee4e46b40e8aaf0ba5f8fff8ff4d20121441aef29adfc75d4820f78c44198912a93b2072296d7a2582177afbea66ddb25922caa396fe47961c2877e82f774dcc0c3758930e990ee99a8bccb5a50dc90b6721a874091dc762214e56d5b805595475577b2b28d8582ee519ab7a55bbf31cccea5475eb0eba4b6249cb6
94e7887a65dca7b922675f4bb3e5dca94f1a93ee6efb7165bf4d680cc122dfc48951b1c213c072e7a2c154c2bf538ca9b03f05fa292796086dacaf21a080c3d809674930a04b23598120096e180bb341e3da46ba1d6e1972a59fab49ef16c1e41ebbb783c0e96e7ca1e978911febab8b024df68a2e29fee6fa9f48f7fcf30dcca3dd13874fd034d5711faaf385ef0ad1c8d23a2f36e7c6d3bd14114522f95544c6cd1f8da0257758a000000000000000000000000000000b2bb591300a297e0a753862627bf61b14800000000000000000000000000000000001154933493f3bfd5a9c11b142c3717000000000000000000000000000000a0aafc71c55c4b8483d9e7aef3086cfc570000000000000000000000000000000000039357f77a572f20464ef0be7f46160000000000000000000000000000009cc129b2954978cc792b1535abe0e678ba0000000000000000000000000000000000210ca7c06f146386f9911070d2c18a0000000000000000000000000000003ced3aa175bdabd5eb4b50e2b1876f096700000000000000000000000000000000000f60d5d5c2236f2f9b27e95561781e000000000000000000000000000000565b50a254b922477f8a1b2493656891a500000000000000000000000000000000001a9b39d2ea0535d2953aaea59a8720000000000000000000000000000000bd15bd55c77c56b6390273d0054bdf4e1e00000000000000000000000000000000001564812d349548f16e4a041ec9cfdf00000000000000000000000000000076773335bab1d7b3e8bb293b1bed7b714700000000000000000000000000000000001fba381793ac902ccd92c2739c6f5200000000000000000000000000000075a46556945910ca4724c38e2abf6a64c000000000000000000000000000000000002c0f17e99fe87402a07fc23224fb9400000000000000000000000000000088fd65468ed8af24dafb5a1760b46f463b00000000000000000000000000000000000090a3201e6bd5559a8c73753c2fe500000000000000000000000000000008a63096070df14d043b9fc73bd466baec000000000000000000000000000000000021f085a0da7846087d69d27449e7e5000000000000000000000000000000661c396474836dfa9270a44f3645f067a700000000000000000000000000000000000ec1483f6cb6321f8a96fcde4ee618000000000000000000000000000000b81c5866353d91337b0f0eabe62c0d700300000000000000000000000000000000000d379ca8660e86b61c308c6e16518e00000000000000000000000000000053656c0b8d26d5d7c4ffbc84cb8c8dc78000000000000000000000000000000000002aa9ba2af5ea1164b866e13b2c16d
1000000000000000000000000000000922021c4c51e3061eb00b48915e506ba45000000000000000000000000000000000015eca982d68e4abe53d58d4bcf075c00000000000000000000000000000085c04ffd527c9209edf992b974cbeba83900000000000000000000000000000000002ba65d580a93a360025f56b8f592a6000000000000000000000000000000d0f88ff226a3ef57f1f592db98e1d827910000000000000000000000000000000000042735accfa3e630a25d172a09f3f2000000000000000000000000000000847902d41483ffca3c114c8cdb4cedd145000000000000000000000000000000000023efc8d31277e79f56e7613272eded0000000000000000000000000000007a7c50b57185b83ac18ba3261e220d7fe10000000000000000000000000000000000199b7e3b8e089834da02b8b41c1ba50000000000000000000000000000008acbaf87e441a6f1fa602118b02a8793cd00000000000000000000000000000000002ec63fed5f835087f8e1418d0dfc1600000000000000000000000000000020f9c0878fb3277d8d19485475c595827000000000000000000000000000000000000499e7cc31fa2a39831fdf7df8513c000000000000000000000000000000184428a24e481dfeb599723ba6c02288c00000000000000000000000000000000000187c9d4dedd2f97e8a8fc0fd129383000000000000000000000000000000fd6667fb403d52b2991a570668382a43f300000000000000000000000000000000002230ae1eb1441db0886e5443aa7aa90000000000000000000000000000004459f189b482e036feae0da6e37c7fd7590000000000000000000000000000000000182ca7fc790dff23214209ba3d6566000000000000000000000000000000b95f9880fb46bbd90b5117dadf9e67726100000000000000000000000000000000000f15d77545d650bc2a882ed442b8a2000000000000000000000000000000700923f7fd6c51e4941184ab0d23b2f1db00000000000000000000000000000000001e7521d6bad6991d37eb8cf06971320000000000000000000000000000009d81f92a7875c29bd986187c67b812da40000000000000000000000000000000000007c0de4f110d28addf1d83d7749cb0000000000000000000000000000000f38cd83b6ce267d4aa382e6f207591f62000000000000000000000000000000000002f8dbc0ef0e84723c125d13bd0d1e80000000000000000000000000000007d03360528bc17f1fb1181c979746ab2b200000000000000000000000000000000000bde2ae9c687c6f2e4495cb6ceab9700000000000000000000000000000004bfa8f689b7df8174df8c96f57872ceb4000000000000000
00000000000000000000177e08abb1a6fd4f220bcccb07e62000000000000000000000000000000d59961d99890eb911c402b007b9cbe976d000000000000000000000000000000000001e44c4d03753c3c68e2f72c29b0af000000000000000000000000000000e10bcda37f9b020238ce42cafba364ad0b00000000000000000000000000000000000a2700ee4bcd2ee1f9d187fb9a58ee00000000000000000000000000000034e61a08bc6a2102504fff403b37d354a0000000000000000000000000000000000019060a7026ab5315caf29609416bff000000000000000000000000000000a0b9296028acb200c6d9216bf1fd05587c00000000000000000000000000000000002537afeb211ab20661ccd797030db90000000000000000000000000000008d6fd1fa3c9a7902ae33325a7481e6ead70000000000000000000000000000000000105d1d3ad744922019ce40015b731b000000000000000000000000000000653ae5b8b4e3bf838c8dad5f00baab715200000000000000000000000000000000002ca54cf76b2d0eabd4dfd4c2f269b7000000000000000000000000000000ed0fa724e64d0bb3e921d29010b254b52200000000000000000000000000000000002283dd0b6919a9b907efa0d1450c83000000000000000000000000000000247d9ae0da38579e4d8f9edbafee4b0e870000000000000000000000000000000000038619b10948cee4ab41f87863ea55000000000000000000000000000000c86cf7a365af37b59374be4838604fda5400000000000000000000000000000000000949507e05840a340738aa0059fca80000000000000000000000000000005b9a32f04b291a93fced33191d11eee64700000000000000000000000000000000001dbceafb63478c33422a6f7b27b729000000000000000000000000000000949df4a91191d0883796d0c17e02de6cfd00000000000000000000000000000000001617ac2fdecf6a50088e4f9e89e09c000000000000000000000000000000d46babede40775278eb51c34e5354e155f0000000000000000000000000000000000214599b8fdbaafc8ef1b5eda1d1294000000000000000000000000000000770087323d677abefbba29108b9a928c3300000000000000000000000000000000000806db3099dfbcd0b4b60d575268960000000000000000000000000000008cfc1070fca86e46a864c883af230b0f3b00000000000000000000000000000000000bf85e10e77edbfff9bb9c84b76ee700000000000000000000000000000015d43e3775742ef07d4d253ac41af7234a00000000000000000000000000000000000bb11cc6d7c81d0284afccb0bb848c0000000000000000000000000000000
0000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000036a960d6f8e4ebd5c4e7619ef498b47415000000000000000000000000000000000003f8bc6c6f49222e71da7ef4f12f59000000000000000000000000000000da0e9ca5b0bc2a8b9f8dc55583a2f0607000000000000000000000000000000000002b34854ad662b37e0e76506e3eaf170000000000000000000000000000004eece60b91287df5941d8b6b85a1c163d4000000000000000000000000000000000025121f27276f7d6405b64dda2d13bb0000000000000000000000000000006b80ad59b5eaf076a2963328805fa2200f000000000000000000000000000000000018c545a23c08a
38624afdc86a65d280000006b", + "aggregationObject": [ + "0x00000000000000000000000000000000000000000000000f3dac4a356c5fdd79", + "0x000000000000000000000000000000000000000000000001f8b97ff8c78f3f9f", + "0x00000000000000000000000000000000000000000000000cf72074e065bc22b3", + "0x0000000000000000000000000000000000000000000000000002dc10ffccda59", + "0x0000000000000000000000000000000000000000000000047bfb4dfb23cc889f", + "0x00000000000000000000000000000000000000000000000871e52274633f4bf7", + "0x0000000000000000000000000000000000000000000000013c49830a0ce95ff2", + "0x0000000000000000000000000000000000000000000000000002c5ab7c0bb98e", + "0x00000000000000000000000000000000000000000000000b0f322f57a86900ed", + "0x000000000000000000000000000000000000000000000002e496ababf56e3cd6", + "0x000000000000000000000000000000000000000000000005dd1141d5deb2050e", + "0x000000000000000000000000000000000000000000000000000268dc87f9458f", + "0x000000000000000000000000000000000000000000000003334a597cd9ec0a0e", + "0x00000000000000000000000000000000000000000000000645a57625996ab518", + "0x000000000000000000000000000000000000000000000006a2f7ffb16256c45b", + "0x00000000000000000000000000000000000000000000000000027ca8c331291b" + ] } diff --git a/yarn-project/noir-protocol-circuits-types/src/artifacts.ts b/yarn-project/noir-protocol-circuits-types/src/artifacts.ts index c87d1def6326..f93bc37bdd59 100644 --- a/yarn-project/noir-protocol-circuits-types/src/artifacts.ts +++ b/yarn-project/noir-protocol-circuits-types/src/artifacts.ts @@ -36,6 +36,8 @@ import PublicKernelTeardownJson from '../artifacts/public_kernel_teardown.json' import PublicKernelTeardownSimulatedJson from '../artifacts/public_kernel_teardown_simulated.json' assert { type: 'json' }; import BaseRollupJson from '../artifacts/rollup_base.json' assert { type: 'json' }; import BaseRollupSimulatedJson from '../artifacts/rollup_base_simulated.json' assert { type: 'json' }; +import BlockMergeRollupJson from '../artifacts/rollup_block_merge.json' 
assert { type: 'json' }; +import BlockRootRollupJson from '../artifacts/rollup_block_root.json' assert { type: 'json' }; import MergeRollupJson from '../artifacts/rollup_merge.json' assert { type: 'json' }; import RootRollupJson from '../artifacts/rollup_root.json' assert { type: 'json' }; @@ -67,6 +69,8 @@ export type ServerProtocolArtifact = | 'RootParityArtifact' | 'BaseRollupArtifact' | 'MergeRollupArtifact' + | 'BlockRootRollupArtifact' + | 'BlockMergeRollupArtifact' | 'RootRollupArtifact'; export type ClientProtocolArtifact = @@ -89,6 +93,8 @@ export const ServerCircuitArtifacts: Record = { RootParityArtifact: keyJsonToVKData(RootParityVkJson), BaseRollupArtifact: keyJsonToVKData(BaseRollupVkJson), MergeRollupArtifact: keyJsonToVKData(MergeRollupVkJson), + BlockRootRollupArtifact: keyJsonToVKData(BlockRootRollupVkJson), + BlockMergeRollupArtifact: keyJsonToVKData(BlockMergeRollupVkJson), RootRollupArtifact: keyJsonToVKData(RootRollupVkJson), }; @@ -127,6 +133,8 @@ export const ProtocolCircuitVkIndexes: Record = { RootParityArtifact: ROOT_PARITY_INDEX, BaseRollupArtifact: BASE_ROLLUP_INDEX, MergeRollupArtifact: MERGE_ROLLUP_INDEX, + BlockRootRollupArtifact: BLOCK_ROOT_ROLLUP_INDEX, + BlockMergeRollupArtifact: BLOCK_MERGE_ROLLUP_INDEX, RootRollupArtifact: ROOT_ROLLUP_INDEX, }; diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index c5f1958bbc37..278e6f7a65e1 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -4,9 +4,13 @@ import { AppendOnlyTreeSnapshot, type BaseOrMergeRollupPublicInputs, BaseRollupInputs, + BlockMergeRollupInputs, + type BlockRootOrBlockMergePublicInputs, + BlockRootRollupInputs, ConstantRollupData, Fr, type GlobalVariables, + type Header, KernelData, type L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, MAX_NULLIFIERS_PER_TX, @@ -25,6 
+29,7 @@ import { PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH, PUBLIC_DATA_TREE_HEIGHT, PartialStateReference, + PreviousRollupBlockData, PreviousRollupData, PublicDataHint, PublicDataTreeLeaf, @@ -34,7 +39,6 @@ import { type RecursiveProof, type RootParityInput, RootRollupInputs, - type RootRollupPublicInputs, StateDiffHints, type StateReference, VK_TREE_HEIGHT, @@ -185,11 +189,35 @@ export function createMergeRollupInputs( return mergeInputs; } +// TODO(#7346): Integrate batch rollup circuits and test below +export function createBlockMergeRollupInputs( + left: [ + BlockRootOrBlockMergePublicInputs, + RecursiveProof, + VerificationKeyAsFields, + ], + right: [ + BlockRootOrBlockMergePublicInputs, + RecursiveProof, + VerificationKeyAsFields, + ], +) { + const mergeInputs = new BlockMergeRollupInputs([ + getPreviousRollupBlockDataFromPublicInputs(left[0], left[1], left[2]), + getPreviousRollupBlockDataFromPublicInputs(right[0], right[1], right[2]), + ]); + return mergeInputs; +} + // Validate that the roots of all local trees match the output of the root circuit simulation -export async function validateRootOutput(rootOutput: RootRollupPublicInputs, db: MerkleTreeOperations) { +export async function validateBlockRootOutput( + blockRootOutput: BlockRootOrBlockMergePublicInputs, + blockHeader: Header, + db: MerkleTreeOperations, +) { await Promise.all([ - validateState(rootOutput.header.state, db), - validateSimulatedTree(await getTreeSnapshot(MerkleTreeId.ARCHIVE, db), rootOutput.archive, 'Archive'), + validateState(blockHeader.state, db), + validateSimulatedTree(await getTreeSnapshot(MerkleTreeId.ARCHIVE, db), blockRootOutput.newArchive, 'Archive'), ]); } @@ -210,8 +238,8 @@ export async function validateState(state: StateReference, db: MerkleTreeOperati ); } -// Builds the inputs for the root rollup circuit, without making any changes to trees -export async function getRootRollupInput( +// Builds the inputs for the block root rollup circuit, without making any changes 
to trees +export async function getBlockRootRollupInput( rollupOutputLeft: BaseOrMergeRollupPublicInputs, rollupProofLeft: RecursiveProof, verificationKeyLeft: VerificationKeyAsFields, @@ -225,7 +253,7 @@ export async function getRootRollupInput( db: MerkleTreeOperations, proverId: Fr, ) { - const previousRollupData: RootRollupInputs['previousRollupData'] = [ + const previousRollupData: BlockRootRollupInputs['previousRollupData'] = [ getPreviousRollupDataFromPublicInputs(rollupOutputLeft, rollupProofLeft, verificationKeyLeft), getPreviousRollupDataFromPublicInputs(rollupOutputRight, rollupProofRight, verificationKeyRight), ]; @@ -246,7 +274,7 @@ export async function getRootRollupInput( 0, ); - return RootRollupInputs.from({ + return BlockRootRollupInputs.from({ previousRollupData, l1ToL2Roots, newL1ToL2Messages, @@ -254,6 +282,30 @@ export async function getRootRollupInput( startL1ToL2MessageTreeSnapshot: messageTreeSnapshot, startArchiveSnapshot, newArchiveSiblingPath, + // TODO(#7346): Inject previous block hash (required when integrating batch rollup circuits) + previousBlockHash: Fr.ZERO, + proverId, + }); +} + +// Builds the inputs for the final root rollup circuit, without making any changes to trees +// TODO(#7346): Integrate batch rollup circuits and test below +export function getRootRollupInput( + rollupOutputLeft: BlockRootOrBlockMergePublicInputs, + rollupProofLeft: RecursiveProof, + verificationKeyLeft: VerificationKeyAsFields, + rollupOutputRight: BlockRootOrBlockMergePublicInputs, + rollupProofRight: RecursiveProof, + verificationKeyRight: VerificationKeyAsFields, + proverId: Fr, +) { + const previousRollupData: RootRollupInputs['previousRollupData'] = [ + getPreviousRollupBlockDataFromPublicInputs(rollupOutputLeft, rollupProofLeft, verificationKeyLeft), + getPreviousRollupBlockDataFromPublicInputs(rollupOutputRight, rollupProofRight, verificationKeyRight), + ]; + + return RootRollupInputs.from({ + previousRollupData, proverId, }); } @@ -273,6 
+325,21 @@ export function getPreviousRollupDataFromPublicInputs( ); } +export function getPreviousRollupBlockDataFromPublicInputs( + rollupOutput: BlockRootOrBlockMergePublicInputs, + rollupProof: RecursiveProof, + vk: VerificationKeyAsFields, +) { + const leafIndex = getVKIndex(vk); + + return new PreviousRollupBlockData( + rollupOutput, + rollupProof, + vk, + new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), getVKSiblingPath(leafIndex)), + ); +} + export async function getConstantRollupData( globalVariables: GlobalVariables, db: MerkleTreeOperations, diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 57adc9806332..b86efedceb66 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -32,8 +32,10 @@ import { type BaseOrMergeRollupPublicInputs, BaseParityInputs, type BaseRollupInputs, + ContentCommitment, Fr, type GlobalVariables, + Header, type KernelCircuitPublicInputs, L1_TO_L2_MSG_SUBTREE_HEIGHT, L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, @@ -47,6 +49,7 @@ import { type RecursiveProof, type RootParityInput, RootParityInputs, + StateReference, type TUBE_PROOF_LENGTH, TubeInputs, type VerificationKeyAsFields, @@ -56,6 +59,7 @@ import { } from '@aztec/circuits.js'; import { makeTuple } from '@aztec/foundation/array'; import { padArrayEnd } from '@aztec/foundation/collection'; +import { sha256Trunc } from '@aztec/foundation/crypto'; import { AbortError } from '@aztec/foundation/error'; import { createDebugLogger } from '@aztec/foundation/log'; import { promiseWithResolvers } from '@aztec/foundation/promise'; @@ -71,11 +75,11 @@ import { inspect } from 'util'; import { buildBaseRollupInput, createMergeRollupInputs, - getRootRollupInput, + getBlockRootRollupInput, getSubtreeSiblingPath, getTreeSnapshot, + validateBlockRootOutput, validatePartialState, - validateRootOutput, validateTx, } 
from './block-building-helpers.js'; import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js'; @@ -392,6 +396,52 @@ export class ProvingOrchestrator implements BlockProver { this.provingState?.cancel(); } + /** + * Extract the block header from public inputs. + * TODO(#7346): Refactor this once new batch rollup circuits are integrated + * @returns The header of this proving state's block. + */ + private async extractBlockHeader() { + if ( + !this.provingState || + !this.provingState.blockRootRollupPublicInputs || + !this.provingState.finalRootParityInput?.publicInputs.shaRoot + ) { + throw new Error(`Invalid proving state, a block must be proven before its header can be extracted.`); + } + + const rootRollupOutputs = this.provingState.blockRootRollupPublicInputs; + const previousMergeData = this.provingState.getMergeInputs(0).inputs; + + if (!previousMergeData[0] || !previousMergeData[1]) { + throw new Error(`Invalid proving state, final merge inputs before block root circuit missing.`); + } + + const contentCommitment = new ContentCommitment( + new Fr(previousMergeData[0].numTxs + previousMergeData[1].numTxs), + sha256Trunc( + Buffer.concat([previousMergeData[0].txsEffectsHash.toBuffer(), previousMergeData[1].txsEffectsHash.toBuffer()]), + ), + this.provingState.finalRootParityInput.publicInputs.shaRoot.toBuffer(), + sha256Trunc(Buffer.concat([previousMergeData[0].outHash.toBuffer(), previousMergeData[1].outHash.toBuffer()])), + ); + const state = new StateReference( + await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.db), + previousMergeData[1].end, + ); + const header = new Header( + rootRollupOutputs.previousArchive, + contentCommitment, + state, + previousMergeData[0].constants.globalVariables, + previousMergeData[0].accumulatedFees.add(previousMergeData[1].accumulatedFees), + ); + if (!header.hash().equals(rootRollupOutputs.endBlockHash)) { + throw new Error(`Block header mismatch in finalise.`); + } + return header; + } + /** * 
Performs the final tree update for the block and returns the fully proven block. * @returns The fully proven block and proof. @@ -407,7 +457,7 @@ export class ProvingOrchestrator implements BlockProver { try { if ( !this.provingState || - !this.provingState.rootRollupPublicInputs || + !this.provingState.blockRootRollupPublicInputs || !this.provingState.finalProof || !this.provingState.finalAggregationObject ) { @@ -417,12 +467,13 @@ export class ProvingOrchestrator implements BlockProver { throw new Error('Block already finalised'); } - const rootRollupOutputs = this.provingState.rootRollupPublicInputs; + const rootRollupOutputs = this.provingState.blockRootRollupPublicInputs; + const header = await this.extractBlockHeader(); logger?.debug(`Updating and validating root trees`); - await this.db.updateArchive(rootRollupOutputs.header); + await this.db.updateArchive(header); - await validateRootOutput(rootRollupOutputs, this.db); + await validateBlockRootOutput(rootRollupOutputs, header, this.db); // Collect all new nullifiers, commitments, and contracts from all txs in this block const gasFees = this.provingState.globalVariables.gasFees; @@ -432,17 +483,17 @@ export class ProvingOrchestrator implements BlockProver { const blockBody = new Body(nonEmptyTxEffects); const l2Block = L2Block.fromFields({ - archive: rootRollupOutputs.archive, - header: rootRollupOutputs.header, + archive: rootRollupOutputs.newArchive, + header: header, body: blockBody, }); - if (!l2Block.body.getTxsEffectsHash().equals(rootRollupOutputs.header.contentCommitment.txsEffectsHash)) { + if (!l2Block.body.getTxsEffectsHash().equals(header.contentCommitment.txsEffectsHash)) { logger.debug(inspect(blockBody)); throw new Error( `Txs effects hash mismatch, ${l2Block.body .getTxsEffectsHash() - .toString('hex')} == ${rootRollupOutputs.header.contentCommitment.txsEffectsHash.toString('hex')} `, + .toString('hex')} == ${header.contentCommitment.txsEffectsHash.toString('hex')} `, ); } @@ -756,7 +807,7 @@ 
export class ProvingOrchestrator implements BlockProver { ); } - // Executes the merge rollup circuit and stored the output as intermediate state for the parent merge/root circuit + // Executes the merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit // Enqueues the next level of merge if all inputs are available private enqueueMergeRollup( provingState: ProvingState, @@ -790,8 +841,8 @@ export class ProvingOrchestrator implements BlockProver { ); } - // Executes the root rollup circuit - private async enqueueRootRollup(provingState: ProvingState | undefined) { + // Executes the block root rollup circuit + private async enqueueBlockRootRollup(provingState: ProvingState | undefined) { if (!provingState?.verifyState()) { logger.debug('Not running root rollup, state no longer valid'); return; @@ -799,7 +850,7 @@ export class ProvingOrchestrator implements BlockProver { const mergeInputData = provingState.getMergeInputs(0); const rootParityInput = provingState.finalRootParityInput!; - const inputs = await getRootRollupInput( + const inputs = await getBlockRootRollupInput( mergeInputData.inputs[0]!, mergeInputData.proofs[0]!, mergeInputData.verificationKeys[0]!, @@ -818,15 +869,15 @@ export class ProvingOrchestrator implements BlockProver { provingState, wrapCallbackInSpan( this.tracer, - 'ProvingOrchestrator.prover.getRootRollupProof', + 'ProvingOrchestrator.prover.getBlockRootRollupProof', { [Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server', - [Attributes.PROTOCOL_CIRCUIT_NAME]: 'root-rollup' as CircuitName, + [Attributes.PROTOCOL_CIRCUIT_NAME]: 'block-root-rollup' as CircuitName, }, - signal => this.prover.getRootRollupProof(inputs, signal, provingState.epochNumber), + signal => this.prover.getBlockRootRollupProof(inputs, signal, provingState.epochNumber), ), result => { - provingState.rootRollupPublicInputs = result.inputs; + provingState.blockRootRollupPublicInputs = result.inputs; provingState.finalAggregationObject = 
extractAggregationObject( result.proof.binaryProof, result.verificationKey.numPublicInputs, @@ -886,17 +937,17 @@ export class ProvingOrchestrator implements BlockProver { ), async rootInput => { provingState!.finalRootParityInput = rootInput; - await this.checkAndEnqueueRootRollup(provingState); + await this.checkAndEnqueueBlockRootRollup(provingState); }, ); } - private async checkAndEnqueueRootRollup(provingState: ProvingState | undefined) { - if (!provingState?.isReadyForRootRollup()) { + private async checkAndEnqueueBlockRootRollup(provingState: ProvingState | undefined) { + if (!provingState?.isReadyForBlockRootRollup()) { logger.debug('Not ready for root rollup'); return; } - await this.enqueueRootRollup(provingState); + await this.enqueueBlockRootRollup(provingState); } /** @@ -925,7 +976,7 @@ export class ProvingOrchestrator implements BlockProver { if (result.mergeLevel === 0n) { // TODO (alexg) remove this `void` - void this.checkAndEnqueueRootRollup(provingState); + void this.checkAndEnqueueBlockRootRollup(provingState); } else { // onto the next merge level this.enqueueMergeRollup(provingState, result.mergeLevel, result.indexWithinMergeLevel, result.mergeInputData); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts index f23a17c32e1e..d4ab78cb91c4 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts @@ -46,11 +46,24 @@ describe('prover/orchestrator/failures', () => { }, ], [ - 'Root Rollup Failed', + 'Block Root Rollup Failed', () => { - jest.spyOn(mockProver, 'getRootRollupProof').mockRejectedValue('Root Rollup Failed'); + jest.spyOn(mockProver, 'getBlockRootRollupProof').mockRejectedValue('Block Root Rollup Failed'); }, ], + // TODO(#7346): Integrate batch rollup circuits into orchestrator and test here + // [ + // 'Block Merge 
Rollup Failed', + // () => { + // jest.spyOn(mockProver, 'getBlockMergeRollupProof').mockRejectedValue('Block Merge Rollup Failed'); + // }, + // ], + // [ + // 'Root Rollup Failed', + // () => { + // jest.spyOn(mockProver, 'getRootRollupProof').mockRejectedValue('Root Rollup Failed'); + // }, + // ], [ 'Base Parity Failed', () => { diff --git a/yarn-project/prover-client/src/orchestrator/proving-state.ts b/yarn-project/prover-client/src/orchestrator/proving-state.ts index b375bb7e0ac3..380720d09c97 100644 --- a/yarn-project/prover-client/src/orchestrator/proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/proving-state.ts @@ -2,6 +2,7 @@ import { type L2Block, type MerkleTreeId, type ProvingResult } from '@aztec/circ import { type AppendOnlyTreeSnapshot, type BaseOrMergeRollupPublicInputs, + type BlockRootOrBlockMergePublicInputs, type Fr, type GlobalVariables, type L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, @@ -11,7 +12,6 @@ import { type RECURSIVE_PROOF_LENGTH, type RecursiveProof, type RootParityInput, - type RootRollupPublicInputs, type VerificationKeyAsFields, } from '@aztec/circuits.js'; import { type Tuple } from '@aztec/foundation/serialize'; @@ -46,7 +46,7 @@ export class ProvingState { private mergeRollupInputs: MergeRollupInputData[] = []; private rootParityInputs: Array | undefined> = []; private finalRootParityInputs: RootParityInput | undefined; - public rootRollupPublicInputs: RootRollupPublicInputs | undefined; + public blockRootRollupPublicInputs: BlockRootOrBlockMergePublicInputs | undefined; public finalAggregationObject: Fr[] | undefined; public finalProof: Proof | undefined; public block: L2Block | undefined; @@ -193,8 +193,8 @@ export class ProvingState { return this.mergeRollupInputs[indexOfMerge]; } - // Returns true if we have sufficient inputs to execute the root rollup - public isReadyForRootRollup() { + // Returns true if we have sufficient inputs to execute the block root rollup + public isReadyForBlockRootRollup() { 
return !( this.mergeRollupInputs[0] === undefined || this.finalRootParityInput === undefined || diff --git a/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts b/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts index a161722315b3..79ae4b85d59e 100644 --- a/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts +++ b/yarn-project/prover-client/src/prover-agent/memory-proving-queue.ts @@ -15,6 +15,9 @@ import type { BaseOrMergeRollupPublicInputs, BaseParityInputs, BaseRollupInputs, + BlockMergeRollupInputs, + BlockRootOrBlockMergePublicInputs, + BlockRootRollupInputs, KernelCircuitPublicInputs, MergeRollupInputs, NESTED_RECURSIVE_PROOF_LENGTH, @@ -330,6 +333,30 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource return this.enqueue({ type: ProvingRequestType.MERGE_ROLLUP, inputs: input }, signal, epochNumber); } + /** + * Creates a proof for the given input. + * @param input - Input to the circuit. + */ + getBlockRootRollupProof( + input: BlockRootRollupInputs, + signal?: AbortSignal, + epochNumber?: number, + ): Promise> { + return this.enqueue({ type: ProvingRequestType.BLOCK_ROOT_ROLLUP, inputs: input }, signal, epochNumber); + } + + /** + * Creates a proof for the given input. + * @param input - Input to the circuit. + */ + getBlockMergeRollupProof( + input: BlockMergeRollupInputs, + signal?: AbortSignal, + epochNumber?: number, + ): Promise> { + return this.enqueue({ type: ProvingRequestType.BLOCK_MERGE_ROLLUP, inputs: input }, signal, epochNumber); + } + /** * Creates a proof for the given input. * @param input - Input to the circuit. 
diff --git a/yarn-project/prover-client/src/prover-agent/prover-agent.ts b/yarn-project/prover-client/src/prover-agent/prover-agent.ts index 9f31ce90eccf..bcf8e3fc5e36 100644 --- a/yarn-project/prover-client/src/prover-agent/prover-agent.ts +++ b/yarn-project/prover-client/src/prover-agent/prover-agent.ts @@ -183,6 +183,14 @@ export class ProverAgent { return this.circuitProver.getMergeRollupProof(inputs); } + case ProvingRequestType.BLOCK_ROOT_ROLLUP: { + return this.circuitProver.getBlockRootRollupProof(inputs); + } + + case ProvingRequestType.BLOCK_MERGE_ROLLUP: { + return this.circuitProver.getBlockMergeRollupProof(inputs); + } + case ProvingRequestType.ROOT_ROLLUP: { return this.circuitProver.getRootRollupProof(inputs); } diff --git a/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts b/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts index 089f6a722700..b5a7722a69ea 100644 --- a/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts +++ b/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts @@ -66,7 +66,7 @@ describe('prover/bb_prover/full-rollup', () => { logger.info(`Finalising block`); const blockResult = await context.orchestrator.finaliseBlock(); - await expect(prover.verifyProof('RootRollupArtifact', blockResult.proof)).resolves.not.toThrow(); + await expect(prover.verifyProof('BlockRootRollupArtifact', blockResult.proof)).resolves.not.toThrow(); }); // TODO(@PhilWindle): Remove public functions and re-enable once we can handle empty tx slots diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 41b25de86a7f..dda5091237fc 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -66,6 +66,7 @@ describe('L1Publisher', () => { let header: Buffer; let archive: Buffer; + let blockHash: Buffer; let body: Buffer; let 
account: PrivateKeyAccount; @@ -77,6 +78,7 @@ describe('L1Publisher', () => { header = l2Block.header.toBuffer(); archive = l2Block.archive.root.toBuffer(); + blockHash = l2Block.header.hash().toBuffer(); body = l2Block.body.toBuffer(); processTxHash = `0x${Buffer.from('txHashProcess').toString('hex')}`; // random tx hash @@ -132,7 +134,12 @@ describe('L1Publisher', () => { expect(result).toEqual(true); - const args = [`0x${header.toString('hex')}`, `0x${archive.toString('hex')}`, `0x${body.toString('hex')}`] as const; + const args = [ + `0x${header.toString('hex')}`, + `0x${archive.toString('hex')}`, + `0x${blockHash.toString('hex')}`, + `0x${body.toString('hex')}`, + ] as const; expect(rollupContractWrite.publishAndProcess).toHaveBeenCalledWith(args, { account: account }); expect(publicClient.getTransactionReceipt).toHaveBeenCalledWith({ hash: publishAndProcessTxHash }); }); @@ -146,7 +153,11 @@ describe('L1Publisher', () => { const result = await publisher.processL2Block(l2Block); expect(result).toEqual(true); - const args = [`0x${header.toString('hex')}`, `0x${archive.toString('hex')}`] as const; + const args = [ + `0x${header.toString('hex')}`, + `0x${archive.toString('hex')}`, + `0x${blockHash.toString('hex')}`, + ] as const; expect(rollupContractWrite.process).toHaveBeenCalledWith(args, { account }); expect(publicClient.getTransactionReceipt).toHaveBeenCalledWith({ hash: processTxHash }); }); diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 405e68ea7368..fa1bf420f8e7 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -72,6 +72,8 @@ export type L1ProcessArgs = { header: Buffer; /** A root of the archive tree after the L2 block is applied. */ archive: Buffer; + /** The L2 block's leaf in the archive tree. */ + blockHash: Buffer; /** L2 block body. 
*/ body: Buffer; /** Attestations */ @@ -213,6 +215,7 @@ export class L1Publisher { const processTxArgs = { header: block.header.toBuffer(), archive: block.archive.root.toBuffer(), + blockHash: block.header.hash().toBuffer(), body: encodedBody, attestations, }; @@ -372,7 +375,7 @@ export class L1Publisher { `0x${proof.toString('hex')}`, ] as const; - return await this.rollupContract.write.submitProof(args, { + return await this.rollupContract.write.submitBlockRootProof(args, { account: this.account, }); } catch (err) { @@ -405,6 +408,7 @@ export class L1Publisher { const args = [ `0x${encodedData.header.toString('hex')}`, `0x${encodedData.archive.toString('hex')}`, + `0x${encodedData.blockHash.toString('hex')}`, attestations, ] as const; @@ -412,7 +416,11 @@ export class L1Publisher { account: this.account, }); } else { - const args = [`0x${encodedData.header.toString('hex')}`, `0x${encodedData.archive.toString('hex')}`] as const; + const args = [ + `0x${encodedData.header.toString('hex')}`, + `0x${encodedData.archive.toString('hex')}`, + `0x${encodedData.blockHash.toString('hex')}`, + ] as const; return await this.rollupContract.write.process(args, { account: this.account, @@ -434,6 +442,7 @@ export class L1Publisher { const args = [ `0x${encodedData.header.toString('hex')}`, `0x${encodedData.archive.toString('hex')}`, + `0x${encodedData.blockHash.toString('hex')}`, attestations, `0x${encodedData.body.toString('hex')}`, ] as const; @@ -445,6 +454,7 @@ export class L1Publisher { const args = [ `0x${encodedData.header.toString('hex')}`, `0x${encodedData.archive.toString('hex')}`, + `0x${encodedData.blockHash.toString('hex')}`, `0x${encodedData.body.toString('hex')}`, ] as const; diff --git a/yarn-project/simulator/src/rollup/rollup.ts b/yarn-project/simulator/src/rollup/rollup.ts index 4c9511c218b0..8ee8b036ca79 100644 --- a/yarn-project/simulator/src/rollup/rollup.ts +++ b/yarn-project/simulator/src/rollup/rollup.ts @@ -3,6 +3,9 @@ import { type 
BaseOrMergeRollupPublicInputs, type BaseParityInputs, type BaseRollupInputs, + type BlockMergeRollupInputs, + type BlockRootOrBlockMergePublicInputs, + type BlockRootRollupInputs, type MergeRollupInputs, type ParityPublicInputs, type RootParityInputs, @@ -15,6 +18,10 @@ import { SimulatedServerCircuitArtifacts, convertBaseParityInputsToWitnessMap, convertBaseParityOutputsFromWitnessMap, + convertBlockMergeRollupInputsToWitnessMap, + convertBlockMergeRollupOutputsFromWitnessMap, + convertBlockRootRollupInputsToWitnessMap, + convertBlockRootRollupOutputsFromWitnessMap, convertMergeRollupInputsToWitnessMap, convertMergeRollupOutputsFromWitnessMap, convertRootParityInputsToWitnessMap, @@ -56,6 +63,18 @@ export interface RollupSimulator { * @returns The public inputs as outputs of the simulation. */ mergeRollupCircuit(input: MergeRollupInputs): Promise; + /** + * Simulates the block root rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. + */ + blockRootRollupCircuit(input: BlockRootRollupInputs): Promise; + /** + * Simulates the block merge rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. + */ + blockMergeRollupCircuit(input: BlockMergeRollupInputs): Promise; /** * Simulates the root rollup circuit from its inputs. * @param input - Inputs to the circuit. @@ -128,6 +147,7 @@ export class RealRollupCircuitSimulator implements RollupSimulator { return Promise.resolve(result); } + /** * Simulates the merge rollup circuit from its inputs. * @param input - Inputs to the circuit. @@ -146,6 +166,42 @@ export class RealRollupCircuitSimulator implements RollupSimulator { return result; } + /** + * Simulates the block root rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. 
+ */ + public async blockRootRollupCircuit(input: BlockRootRollupInputs): Promise { + const witnessMap = convertBlockRootRollupInputsToWitnessMap(input); + + const witness = await this.wasmSimulator.simulateCircuit( + witnessMap, + SimulatedServerCircuitArtifacts.BlockRootRollupArtifact, + ); + + const result = convertBlockRootRollupOutputsFromWitnessMap(witness); + + return Promise.resolve(result); + } + + /** + * Simulates the block merge rollup circuit from its inputs. + * @param input - Inputs to the circuit. + * @returns The public inputs as outputs of the simulation. + */ + public async blockMergeRollupCircuit(input: BlockMergeRollupInputs): Promise { + const witnessMap = convertBlockMergeRollupInputsToWitnessMap(input); + + const witness = await this.wasmSimulator.simulateCircuit( + witnessMap, + SimulatedServerCircuitArtifacts.BlockMergeRollupArtifact, + ); + + const result = convertBlockMergeRollupOutputsFromWitnessMap(witness); + + return Promise.resolve(result); + } + /** * Simulates the root rollup circuit from its inputs. * @param input - Inputs to the circuit.