diff --git a/l1-contracts/gas_benchmark.md b/l1-contracts/gas_benchmark.md index e84f0b4febdf..56898b7b9809 100644 --- a/l1-contracts/gas_benchmark.md +++ b/l1-contracts/gas_benchmark.md @@ -1,20 +1,20 @@ -Function | Metric | No Validators (gas/tx) | 100 Validators (gas/tx) | Δ Gas (gas/tx) | % Overhead +Function | Metric | No Validators (gas/tx) | 100 Validators (gas/tx) | Δ Gas (gas/tx) | % Overhead -------------------------+---------+------------------------+-------------------------+------------------------+----------------- -forward | Min | 308660 (857.39) | 308660 (857.39) | 0 (0.00) | 0.00% -forward | Avg | 320436 (890.10) | 543109 (1508.64) | 222673 (618.54) | 69.49% -forward | Median | 320645 (890.68) | 642954 (1785.98) | 322309 (895.30) | 100.52% -forward | Max | 332545 (923.74) | 655258 (1820.16) | 322713 (896.42) | 97.04% +forward | Min | 298181 (828.28) | 576397 (1601.10) | 278216 (772.82) | 93.30% +forward | Avg | 310056 (861.27) | 588480 (1634.67) | 278424 (773.40) | 89.80% +forward | Median | 308932 (858.14) | 587490 (1631.92) | 278558 (773.77) | 90.17% +forward | Max | 321646 (893.46) | 600346 (1667.63) | 278700 (774.17) | 86.65% forward | # Calls | 100 | 100 | 0 | 0.00% -------------------------+---------+------------------------+-------------------------+------------------------+----------------- -setupEpoch | Min | 52590 (4.57) | 39767 (3.45) | -12823 (-1.11) | -24.38% -setupEpoch | Avg | 57031 (4.95) | 92551 (8.03) | 35520 (3.08) | 62.28% -setupEpoch | Median | 57360 (4.98) | 40222 (3.49) | -17138 (-1.49) | -29.88% -setupEpoch | Max | 84128 (7.30) | 1655176 (143.68) | 1571048 (136.38) | 1867.45% +setupEpoch | Min | 37724 (3.27) | 37724 (3.27) | 0 (0.00) | 0.00% +setupEpoch | Avg | 42099 (3.65) | 61483 (5.34) | 19384 (1.68) | 46.04% +setupEpoch | Median | 39724 (3.45) | 39724 (3.45) | 0 (0.00) | 0.00% +setupEpoch | Max | 112629 (9.78) | 597218 (51.84) | 484589 (42.07) | 430.25% setupEpoch | # Calls | 100 | 100 | 0 | 0.00% 
-------------------------+---------+------------------------+-------------------------+------------------------+----------------- -submitEpochRootProof | Min | 868811 (75.42) | 868811 (75.42) | 0 (0.00) | 0.00% -submitEpochRootProof | Avg | 886463 (76.95) | 886463 (76.95) | 0 (0.00) | 0.00% -submitEpochRootProof | Median | 868823 (75.42) | 868823 (75.42) | 0 (0.00) | 0.00% -submitEpochRootProof | Max | 921756 (80.01) | 921756 (80.01) | 0 (0.00) | 0.00% +submitEpochRootProof | Min | 867316 (75.29) | 867316 (75.29) | 0 (0.00) | 0.00% +submitEpochRootProof | Avg | 885016 (76.82) | 885016 (76.82) | 0 (0.00) | 0.00% +submitEpochRootProof | Median | 867400 (75.30) | 867400 (75.30) | 0 (0.00) | 0.00% +submitEpochRootProof | Max | 920334 (79.89) | 920334 (79.89) | 0 (0.00) | 0.00% submitEpochRootProof | # Calls | 3 | 3 | 0 | 0.00% -------------------------+---------+------------------------+-------------------------+------------------------+----------------- diff --git a/l1-contracts/gas_report.json b/l1-contracts/gas_report.json index 990e5420a30e..173c6310bea8 100644 --- a/l1-contracts/gas_report.json +++ b/l1-contracts/gas_report.json @@ -3,7 +3,7 @@ "contract": "src/core/Rollup.sol:Rollup", "deployment": { "gas": 0, - "size": 43024 + "size": 43935 }, "functions": { "archive()": { @@ -14,7 +14,7 @@ "max": 4583 }, "getBlock(uint256)": { - "calls": 886, + "calls": 847, "min": 9208, "mean": 9208, "median": 9208, @@ -50,17 +50,17 @@ }, "getFeeAssetPortal()": { "calls": 3104, - "min": 497, - "mean": 1497, - "median": 1497, - "max": 2497 + "min": 453, + "mean": 1453, + "median": 1453, + "max": 2453 }, "getInbox()": { "calls": 6198, - "min": 2476, - "mean": 2476, - "median": 2476, - "max": 2476 + "min": 2498, + "mean": 2498, + "median": 2498, + "max": 2498 }, "getL1FeesAt(uint256)": { "calls": 2, @@ -71,10 +71,10 @@ }, "getManaBaseFeeAt(uint256,bool)": { "calls": 2325, - "min": 30582, - "mean": 32494, - "median": 30582, - "max": 36337 + "min": 30538, + "mean": 32450, + 
"median": 30538, + "max": 36293 }, "getManaTarget()": { "calls": 2, @@ -85,10 +85,10 @@ }, "getOutbox()": { "calls": 3875, - "min": 2474, - "mean": 2474, - "median": 2474, - "max": 2474 + "min": 2452, + "mean": 2452, + "median": 2452, + "max": 2452 }, "getPendingBlockNumber()": { "calls": 1545, @@ -99,10 +99,10 @@ }, "getProofSubmissionWindow()": { "calls": 4, - "min": 2493, - "mean": 2493, - "median": 2493, - "max": 2493 + "min": 2404, + "mean": 2404, + "median": 2404, + "max": 2404 }, "getProvenBlockNumber()": { "calls": 6196, @@ -127,10 +127,10 @@ }, "getSequencerRewards(address)": { "calls": 2, - "min": 2649, - "mean": 2649, - "median": 2649, - "max": 2649 + "min": 2716, + "mean": 2716, + "median": 2716, + "max": 2716 }, "getTimestampForSlot(uint256)": { "calls": 2432, @@ -148,17 +148,17 @@ }, "owner()": { "calls": 1552, - "min": 464, - "mean": 464, - "median": 464, - "max": 464 + "min": 420, + "mean": 420, + "median": 420, + "max": 420 }, - "propose((bytes32,bytes,(int256),bytes,bytes32[]),(bool,uint8,bytes32,bytes32)[],bytes)": { + "propose((bytes32,bytes,(int256),bytes,bytes32[]),(address,(uint8,bytes32,bytes32))[],bytes)": { "calls": 2327, - "min": 152057, - "mean": 334744, - "median": 330093, - "max": 360753 + "min": 180367, + "mean": 343443, + "median": 348542, + "max": 380636 }, "prune()": { "calls": 6, @@ -169,17 +169,17 @@ }, "setProvingCostPerMana(uint256)": { "calls": 2, - "min": 28669, - "mean": 28669, - "median": 28669, - "max": 28669 + "min": 28780, + "mean": 28780, + "median": 28780, + "max": 28780 }, - "submitEpochRootProof((uint256,uint256,(bytes32,bytes32,uint256,bytes32,address),bytes32[],bytes,bytes))": { - "calls": 885, - "min": 59252, - "mean": 419060, - "median": 415459, - "max": 449558 + "submitEpochRootProof((uint256,uint256,(bytes32,bytes32,address),bytes32[],bytes,bytes))": { + "calls": 846, + "min": 58871, + "mean": 417467, + "median": 414067, + "max": 448154 } } }, @@ -244,7 +244,7 @@ }, "functions": { "getCanonicalRollup()": { - 
"calls": 2643, + "calls": 2526, "min": 1016, "mean": 3016, "median": 1016, @@ -281,7 +281,7 @@ "max": 238 }, "canonicalRollup()": { - "calls": 881, + "calls": 842, "min": 10158, "mean": 10158, "median": 10158, diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index f011392bab9c..b1f6774502bd 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -43,7 +43,7 @@ import { Epoch, Timestamp, Errors, - Signature, + CommitteeAttestation, ExtRollupLib, EthValue, STFLib, @@ -94,7 +94,7 @@ contract Rollup is IStaking, IValidatorSelection, IRollup, RollupCore { * without having to deal with viem or anvil for simulating timestamps in the future. * * @param _header - The header to validate - * @param _signatures - The signatures to validate + * @param _attestations - The attestations to validate * @param _digest - The digest to validate * @param _currentTime - The current time * @param _blobsHash - The blobs hash for this block @@ -102,7 +102,7 @@ contract Rollup is IStaking, IValidatorSelection, IRollup, RollupCore { */ function validateHeader( bytes calldata _header, - Signature[] memory _signatures, + CommitteeAttestation[] memory _attestations, bytes32 _digest, Timestamp _currentTime, bytes32 _blobsHash, @@ -111,7 +111,7 @@ contract Rollup is IStaking, IValidatorSelection, IRollup, RollupCore { ProposeLib.validateHeader( ValidateHeaderArgs({ header: HeaderLib.decode(_header), - attestations: _signatures, + attestations: _attestations, digest: _digest, currentTime: _currentTime, manaBaseFee: getManaBaseFeeAt(_currentTime, true), @@ -148,6 +148,22 @@ contract Rollup is IStaking, IValidatorSelection, IRollup, RollupCore { return getEpochCommittee(getEpochAt(_ts)); } + /** + * @notice Get the committee commitment a the given timestamp + * + * @param _ts - The timestamp to get the committee for + * + * @return The committee commitment for the given timestamp + * @return The committee size for the given timestamp + 
*/ + function getCommitteeCommitmentAt(Timestamp _ts) + external + override(IValidatorSelection) + returns (bytes32, uint256) + { + return ValidatorSelectionLib.getCommitteeCommitmentAt(getEpochAt(_ts)); + } + /** * @notice Get the proposer for the current slot * @@ -186,14 +202,10 @@ contract Rollup is IStaking, IValidatorSelection, IRollup, RollupCore { bytes32 tipArchive = rollupStore.blocks[pendingBlockNumber].archive; require(tipArchive == _archive, Errors.Rollup__InvalidArchive(tipArchive, _archive)); - Signature[] memory sigs = new Signature[](0); - - ValidatorSelectionLib.verify( - slot, - slot.epochFromSlot(), - sigs, - _archive, - BlockHeaderValidationFlags({ignoreDA: true, ignoreSignatures: true}) + Epoch epochNumber = slot.epochFromSlot(); + address proposer = ValidatorSelectionLib.getProposerAt(slot, epochNumber); + require( + proposer == msg.sender, Errors.ValidatorSelection__InvalidProposer(proposer, msg.sender) ); return (slot, pendingBlockNumber + 1); diff --git a/l1-contracts/src/core/RollupCore.sol b/l1-contracts/src/core/RollupCore.sol index d705366735ab..c62cf21a00e1 100644 --- a/l1-contracts/src/core/RollupCore.sol +++ b/l1-contracts/src/core/RollupCore.sol @@ -15,7 +15,7 @@ import {IValidatorSelectionCore} from "@aztec/core/interfaces/IValidatorSelectio import {IInbox} from "@aztec/core/interfaces/messagebridge/IInbox.sol"; import {IOutbox} from "@aztec/core/interfaces/messagebridge/IOutbox.sol"; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; -import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {CommitteeAttestation} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; import {CheatLib} from "@aztec/core/libraries/rollup/CheatLib.sol"; import {ExtRollupLib} from "@aztec/core/libraries/rollup/ExtRollupLib.sol"; @@ -215,10 +215,10 @@ contract RollupCore is function propose( ProposeArgs calldata _args, - Signature[] memory _signatures, + 
CommitteeAttestation[] memory _attestations, bytes calldata _blobInput ) external override(IRollupCore) { - ExtRollupLib.propose(_args, _signatures, _blobInput, checkBlob); + ExtRollupLib.propose(_args, _attestations, _blobInput, checkBlob); } function setupEpoch() public override(IValidatorSelectionCore) { diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index 9313f0fe4e08..67f3ad643218 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -6,7 +6,7 @@ import {IFeeJuicePortal} from "@aztec/core/interfaces/IFeeJuicePortal.sol"; import {IVerifier} from "@aztec/core/interfaces/IVerifier.sol"; import {IInbox} from "@aztec/core/interfaces/messagebridge/IInbox.sol"; import {IOutbox} from "@aztec/core/interfaces/messagebridge/IOutbox.sol"; -import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {CommitteeAttestation} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import { FeeHeader, L1FeeData, ManaBaseFeeComponents } from "@aztec/core/libraries/rollup/FeeLib.sol"; @@ -137,7 +137,7 @@ interface IRollupCore { function propose( ProposeArgs calldata _args, - Signature[] memory _signatures, + CommitteeAttestation[] memory _attestations, bytes calldata _blobInput ) external; @@ -150,7 +150,7 @@ interface IRollupCore { interface IRollup is IRollupCore { function validateHeader( bytes calldata _header, - Signature[] memory _signatures, + CommitteeAttestation[] memory _attestations, bytes32 _digest, Timestamp _currentTime, bytes32 _blobsHash, diff --git a/l1-contracts/src/core/interfaces/IValidatorSelection.sol b/l1-contracts/src/core/interfaces/IValidatorSelection.sol index 411c1be0a98f..b548fed5dc75 100644 --- a/l1-contracts/src/core/interfaces/IValidatorSelection.sol +++ b/l1-contracts/src/core/interfaces/IValidatorSelection.sol @@ -4,21 +4,10 @@ pragma solidity >=0.8.27; import {Timestamp, Slot, Epoch} from 
"@aztec/core/libraries/TimeLib.sol"; import {Checkpoints} from "@oz/utils/structs/Checkpoints.sol"; -/** - * @notice The data structure for an epoch - * @param committee - The attesters for the epoch - * @param sampleSeed - The seed used to sample the attesters of the epoch - * @param nextSeed - The seed used to influence the NEXT epoch - */ - -struct EpochData { - // TODO: remove in favor of commitment to comittee - address[] committee; -} struct ValidatorSelectionStorage { // A mapping to snapshots of the validator set - mapping(Epoch => EpochData) epochs; + mapping(Epoch => bytes32 committeeCommitment) committeeCommitments; // Checkpointed map of epoch -> sample seed Checkpoints.Trace224 seeds; uint256 targetCommitteeSize; @@ -37,6 +26,7 @@ interface IValidatorSelection is IValidatorSelectionCore { // Non view as uses transient storage function getCurrentEpochCommittee() external returns (address[] memory); function getCommitteeAt(Timestamp _ts) external returns (address[] memory); + function getCommitteeCommitmentAt(Timestamp _ts) external returns (bytes32, uint256); function getEpochCommittee(Epoch _epoch) external returns (address[] memory); // Stable diff --git a/l1-contracts/src/core/libraries/Errors.sol b/l1-contracts/src/core/libraries/Errors.sol index 59062eff8b87..f8db3d4affd9 100644 --- a/l1-contracts/src/core/libraries/Errors.sol +++ b/l1-contracts/src/core/libraries/Errors.sol @@ -101,9 +101,8 @@ library Errors { error ValidatorSelection__InvalidProposer(address expected, address actual); // 0xa8843a68 error ValidatorSelection__InvalidDeposit(address attester, address proposer); // 0x533169bd error ValidatorSelection__InsufficientAttestations(uint256 minimumNeeded, uint256 provided); // 0xaf47297f - error ValidatorSelection__InsufficientAttestationsProvided( - uint256 minimumNeeded, uint256 provided - ); // 0x4d4f66ac + error ValidatorSelection__InvalidCommitteeCommitment(bytes32 reconstructed, bytes32 expected); // 0x10816cae + error 
ValidatorSelection__InvalidAttestationsLength(uint256 expected, uint256 actual); // Staking error Staking__AlreadyActive(address attester); // 0x5e206fa4 diff --git a/l1-contracts/src/core/libraries/crypto/SignatureLib.sol b/l1-contracts/src/core/libraries/crypto/SignatureLib.sol index 1265adb305f9..c3dad56750d2 100644 --- a/l1-contracts/src/core/libraries/crypto/SignatureLib.sol +++ b/l1-contracts/src/core/libraries/crypto/SignatureLib.sol @@ -4,13 +4,21 @@ pragma solidity ^0.8.27; import {Errors} from "@aztec/core/libraries/Errors.sol"; +// Attestation Signature struct Signature { - bool isEmpty; uint8 v; bytes32 r; bytes32 s; } +// A committee attestation can be made up of a signature and an address. +// Committee members that have attested will produce a signature, and if they have not attested, the signature will be empty and +// an address provided. +struct CommitteeAttestation { + address addr; + Signature signature; +} + library SignatureLib { /** * @notice The domain separator for the signatures @@ -28,7 +36,6 @@ library SignatureLib { * @param _digest - The digest that was signed */ function verify(Signature memory _signature, address _signer, bytes32 _digest) internal pure { - require(!_signature.isEmpty, Errors.SignatureLib__CannotVerifyEmpty()); address recovered = ecrecover(_digest, _signature.v, _signature.r, _signature.s); require(_signer == recovered, Errors.SignatureLib__InvalidSignature(_signer, recovered)); } diff --git a/l1-contracts/src/core/libraries/rollup/ExtRollupLib.sol b/l1-contracts/src/core/libraries/rollup/ExtRollupLib.sol index ce1a8d06c228..4b75bc0eff75 100644 --- a/l1-contracts/src/core/libraries/rollup/ExtRollupLib.sol +++ b/l1-contracts/src/core/libraries/rollup/ExtRollupLib.sol @@ -7,7 +7,7 @@ import {Epoch, Timestamp, TimeLib} from "@aztec/core/libraries/TimeLib.sol"; import {ValidatorSelectionLib} from "./../validator-selection/ValidatorSelectionLib.sol"; import {BlobLib} from "./BlobLib.sol"; import {EpochProofLib} from 
"./EpochProofLib.sol"; -import {ProposeLib, ProposeArgs, Signature} from "./ProposeLib.sol"; +import {ProposeLib, ProposeArgs, CommitteeAttestation} from "./ProposeLib.sol"; // We are using this library such that we can more easily "link" just a larger external library // instead of a few smaller ones. @@ -20,11 +20,11 @@ library ExtRollupLib { function propose( ProposeArgs calldata _args, - Signature[] memory _signatures, + CommitteeAttestation[] memory _attestations, bytes calldata _blobInput, bool _checkBlob ) external { - ProposeLib.propose(_args, _signatures, _blobInput, _checkBlob); + ProposeLib.propose(_args, _attestations, _blobInput, _checkBlob); } function initializeValidatorSelection(uint256 _targetCommitteeSize) external { diff --git a/l1-contracts/src/core/libraries/rollup/ProposeLib.sol b/l1-contracts/src/core/libraries/rollup/ProposeLib.sol index 7e5da1e0a84f..2dd006175b19 100644 --- a/l1-contracts/src/core/libraries/rollup/ProposeLib.sol +++ b/l1-contracts/src/core/libraries/rollup/ProposeLib.sol @@ -10,7 +10,7 @@ import { } from "@aztec/core/interfaces/IRollup.sol"; import {MerkleLib} from "@aztec/core/libraries/crypto/MerkleLib.sol"; import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; -import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {CommitteeAttestation} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; import {OracleInput, FeeLib, ManaBaseFeeComponents} from "@aztec/core/libraries/rollup/FeeLib.sol"; import {Timestamp, Slot, Epoch, TimeLib} from "@aztec/core/libraries/TimeLib.sol"; @@ -59,7 +59,7 @@ struct InterimProposeValues { */ struct ValidateHeaderArgs { Header header; - Signature[] attestations; + CommitteeAttestation[] attestations; bytes32 digest; Timestamp currentTime; uint256 manaBaseFee; @@ -77,12 +77,12 @@ library ProposeLib { * @dev `eth_log_handlers` rely on this function * * @param _args - The arguments to propose 
the block - * @param _signatures - Signatures from the validators + * @param _attestations - Signatures (or empty) from the validators * @param _blobInput - The blob evaluation KZG proof, challenge, and opening required for the precompile. */ function propose( ProposeArgs calldata _args, - Signature[] memory _signatures, + CommitteeAttestation[] memory _attestations, bytes calldata _blobInput, bool _checkBlob ) internal { @@ -109,7 +109,7 @@ library ProposeLib { validateHeader( ValidateHeaderArgs({ header: header, - attestations: _signatures, + attestations: _attestations, digest: digest( ProposePayload({ archive: _args.archive, diff --git a/l1-contracts/src/core/libraries/validator-selection/ValidatorSelectionLib.sol b/l1-contracts/src/core/libraries/validator-selection/ValidatorSelectionLib.sol index 3cb44e9c6bc8..abc306b49367 100644 --- a/l1-contracts/src/core/libraries/validator-selection/ValidatorSelectionLib.sol +++ b/l1-contracts/src/core/libraries/validator-selection/ValidatorSelectionLib.sol @@ -3,11 +3,13 @@ pragma solidity >=0.8.27; import {BlockHeaderValidationFlags} from "@aztec/core/interfaces/IRollup.sol"; -import { - EpochData, ValidatorSelectionStorage -} from "@aztec/core/interfaces/IValidatorSelection.sol"; +import {ValidatorSelectionStorage} from "@aztec/core/interfaces/IValidatorSelection.sol"; import {SampleLib} from "@aztec/core/libraries/crypto/SampleLib.sol"; -import {SignatureLib, Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import { + SignatureLib, + Signature, + CommitteeAttestation +} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; import { AddressSnapshotLib, @@ -26,6 +28,7 @@ library ValidatorSelectionLib { using SignatureLib for Signature; using TimeLib for Timestamp; using TimeLib for Epoch; + using TimeLib for Slot; using AddressSnapshotLib for SnapshottedAddressSet; using Checkpoints for Checkpoints.Trace224; using SafeCast for *; @@ -60,10 +63,10 
@@ library ValidatorSelectionLib { //################ Committee ################ // If the committee is not set for this epoch, we need to sample it - EpochData storage epoch = store.epochs[_epochNumber]; - uint256 committeeLength = epoch.committee.length; - if (committeeLength == 0) { - epoch.committee = sampleValidators(_epochNumber, sampleSeed); + bytes32 committeeCommitment = store.committeeCommitments[_epochNumber]; + if (committeeCommitment == bytes32(0)) { + address[] memory committee = sampleValidators(_epochNumber, sampleSeed); + store.committeeCommitments[_epochNumber] = computeCommitteeCommitment(committee); } } @@ -80,29 +83,38 @@ library ValidatorSelectionLib { * - If the number of valid attestations is insufficient * * @param _slot - The slot of the block - * @param _signatures - The signatures of the committee members + * @param _attestations - The signatures (or empty; just address is provided) of the committee members * @param _digest - The digest of the block */ function verify( Slot _slot, Epoch _epochNumber, - Signature[] memory _signatures, + CommitteeAttestation[] memory _attestations, bytes32 _digest, BlockHeaderValidationFlags memory _flags ) internal { - address[] memory committee = getCommitteeAt(_epochNumber); + (bytes32 committeeCommitment, uint256 committeeSize) = getCommitteeCommitmentAt(_epochNumber); // @todo Consider getting rid of this option. 
// If the proposer is open, we allow anyone to propose without needing any signatures - if (committee.length == 0) { + if (committeeSize == 0) { return; } - address attester = committee[computeProposerIndex( - _epochNumber, _slot, getSampleSeed(_epochNumber), committee.length - )]; + require( + _attestations.length == committeeSize, + Errors.ValidatorSelection__InvalidAttestationsLength(committeeSize, _attestations.length) + ); + + uint256 proposerIndex = + computeProposerIndex(_epochNumber, _slot, getSampleSeed(_epochNumber), committeeSize); + + // We determine who the proposer is by indexing into the provided attestations array, we then recover their proposer + // address from storage + // The user controls this value, however, if a false value is provided, the recalculated committee commitment will + // be incorrect, and we will revert. + address attester = _attestations[proposerIndex].addr; address proposer = StakingLib.getProposerForAttester(attester); - require(proposer != address(0), Errors.Staking__InvalidProposer()); require( proposer == msg.sender, Errors.ValidatorSelection__InvalidProposer(proposer, msg.sender) @@ -112,32 +124,40 @@ library ValidatorSelectionLib { return; } - uint256 needed = committee.length * 2 / 3 + 1; - require( - _signatures.length >= needed, - Errors.ValidatorSelection__InsufficientAttestationsProvided(needed, _signatures.length) - ); - // Validate the attestations + uint256 needed = committeeSize * 2 / 3 + 1; uint256 validAttestations = 0; + address[] memory reconstructedCommittee = new address[](committeeSize); + bytes32 digest = _digest.toEthSignedMessageHash(); - for (uint256 i = 0; i < _signatures.length; i++) { + for (uint256 i = 0; i < _attestations.length; i++) { // To avoid stack too deep errors - Signature memory signature = _signatures[i]; - if (signature.isEmpty) { - continue; + CommitteeAttestation memory attestation = _attestations[i]; + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/14283): as part of 
bitmap / storage optimisation, this check will change to whatever the bitmap includes + if (attestation.signature.v != 0) { + address recovered = ecrecover( + digest, attestation.signature.v, attestation.signature.r, attestation.signature.s + ); + reconstructedCommittee[i] = recovered; + validAttestations++; + } else { + reconstructedCommittee[i] = attestation.addr; } - - // The verification will throw if invalid - signature.verify(committee[i], digest); - validAttestations++; } require( validAttestations >= needed, Errors.ValidatorSelection__InsufficientAttestations(needed, validAttestations) ); + + // Check the committee commitment + bytes32 reconstructedCommitment = computeCommitteeCommitment(reconstructedCommittee); + if (reconstructedCommitment != committeeCommitment) { + revert Errors.ValidatorSelection__InvalidCommitteeCommitment( + reconstructedCommitment, committeeCommitment + ); + } } function getProposerAt(Slot _slot, Epoch _epochNumber) internal returns (address) { @@ -145,14 +165,14 @@ library ValidatorSelectionLib { // it does not need to actually return the full committee and then draw from it // it can just return the proposer directly, but then we duplicate the code // which we just don't have room for right now... 
- address[] memory committee = getCommitteeAt(_epochNumber); + uint224 sampleSeed = getSampleSeed(_epochNumber); + address[] memory committee = sampleValidators(_epochNumber, sampleSeed); if (committee.length == 0) { return address(0); } - address attester = committee[computeProposerIndex( - _epochNumber, _slot, getSampleSeed(_epochNumber), committee.length - )]; + address attester = + committee[computeProposerIndex(_epochNumber, _slot, sampleSeed, committee.length)]; return StakingLib.getProposerForAttester(attester); } @@ -167,31 +187,22 @@ library ValidatorSelectionLib { */ function sampleValidators(Epoch _epoch, uint224 _seed) internal returns (address[] memory) { ValidatorSelectionStorage storage store = getStorage(); - // We do -1, as the snapshots practically happen at the end of the block, e.g., - // a tx manipulating the set in at $t$ would be visible already at lookup $t$ if after that - // transactions. But reading at $t-1$ would be the state at the end of $t-1$ which is the state - // as we "start" time $t$. We then shift that back by an entire L2 epoch to guarantee - // we are not hit by last-minute changes or L1 reorgs when syncing validators from our clients. 
- Timestamp ts = Timestamp.wrap( - Timestamp.unwrap(_epoch.toTimestamp()) - TimeLib.getEpochDurationInSeconds() - 1 - ); - uint256 validatorSetSize = StakingLib.getAttesterCountAtTime(ts); - - if (validatorSetSize == 0) { - return new address[](0); - } - + uint32 ts = epochToSampleTime(_epoch); + uint256 validatorSetSize = StakingLib.getAttesterCountAtTime(Timestamp.wrap(ts)); uint256 targetCommitteeSize = store.targetCommitteeSize; + bool smallerCommittee = validatorSetSize <= targetCommitteeSize; + // If we have less validators than the target committee size, we just return the full set - if (validatorSetSize <= targetCommitteeSize) { - return StakingLib.getAttestersAtTime(ts); + if (smallerCommittee) { + return StakingLib.getAttestersAtTime(Timestamp.wrap(ts)); + } else { + // Sample the larger committee + uint256[] memory indices = + SampleLib.computeCommittee(targetCommitteeSize, validatorSetSize, _seed); + + return StakingLib.getAttestersFromIndicesAtTime(Timestamp.wrap(ts), indices); } - - uint256[] memory indices = - SampleLib.computeCommittee(targetCommitteeSize, validatorSetSize, _seed); - - return StakingLib.getAttestersFromIndicesAtTime(ts, indices); } /** @@ -199,17 +210,42 @@ library ValidatorSelectionLib { * * @param _epochNumber - The epoch to get the committee for * - * @return The committee for the epoch + * @return - The committee for the epoch */ function getCommitteeAt(Epoch _epochNumber) internal returns (address[] memory) { + uint224 seed = getSampleSeed(_epochNumber); + return sampleValidators(_epochNumber, seed); + } + + /** + * @notice Get the committee commitment for an epoch + * @param _epochNumber - The epoch to get the committee commitment for + * @return committeeCommitment - The commitment to the current committee + * @return committeeSize - The size of the current committee + * + * @dev - intended as a view function, do not update state + */ + function getCommitteeCommitmentAt(Epoch _epochNumber) + internal + returns (bytes32 committeeCommitment, uint256 committeeSize) + { 
ValidatorSelectionStorage storage store = getStorage(); - EpochData storage epoch = store.epochs[_epochNumber]; - // If the committe is already set, just return that, otherwise need to sample - if (epoch.committee.length > 0) { - return epoch.committee; + committeeCommitment = store.committeeCommitments[_epochNumber]; + if (committeeCommitment == 0) { + // If no committee has been stored, then we need to setup the epoch + committeeCommitment = + computeCommitteeCommitment(sampleValidators(_epochNumber, getSampleSeed(_epochNumber))); } - return sampleValidators(_epochNumber, getSampleSeed(_epochNumber)); + + // We do not want to recalculate this each time + uint32 ts = epochToSampleTime(_epochNumber); + committeeSize = StakingLib.getAttesterCountAtTime(Timestamp.wrap(ts)); + if (committeeSize > store.targetCommitteeSize) { + committeeSize = store.targetCommitteeSize; + } + + return (committeeCommitment, committeeSize); } /** @@ -247,13 +283,20 @@ library ValidatorSelectionLib { } } + function epochToSampleTime(Epoch _epoch) internal view returns (uint32) { + // We do -1, as the snapshots practically happen at the end of the block, e.g., + // a tx manipulating the set in at $t$ would be visible already at lookup $t$ if after that + // transactions. But reading at $t-1$ would be the state at the end of $t-1$ which is the state + // as we "start" time $t$. We then shift that back by an entire L2 epoch to guarantee + // we are not hit by last-minute changes or L1 reorgs when syncing validators from our clients. + + return Timestamp.unwrap(_epoch.toTimestamp()).toUint32() + - uint32(TimeLib.getEpochDurationInSeconds()) - 1; + } + /** * @notice Get the sample seed for an epoch * - * @dev This should behave as walking past the line, but it does not currently do that. - * If there are entire skips, e.g., 1, 2, 5 and we then go back and try executing - * for 4 we will get an invalid value because we will read lastSeed which is from 5. 
- * * @dev The `_epoch` will never be 0 nor in the future * * @dev The return value will be equal to keccak256(n, block.prevrandao) for n being the @@ -263,11 +306,7 @@ library ValidatorSelectionLib { */ function getSampleSeed(Epoch _epoch) internal view returns (uint224) { ValidatorSelectionStorage storage store = getStorage(); - uint224 sampleSeed = store.seeds.upperLookup(Epoch.unwrap(_epoch).toUint32()); - if (sampleSeed == 0) { - sampleSeed = type(uint224).max; - } - return sampleSeed; + return store.seeds.upperLookup(Epoch.unwrap(_epoch).toUint32()); } function getStorage() internal pure returns (ValidatorSelectionStorage storage storageStruct) { @@ -292,6 +331,17 @@ library ValidatorSelectionLib { return uint224(uint256(keccak256(abi.encode(_epoch, block.prevrandao)))); } + /** + * @notice Computes the committee commitment for a given committee + * + * @param _committee - The committee to compute the commitment for + * + * @return The computed commitment + */ + function computeCommitteeCommitment(address[] memory _committee) private pure returns (bytes32) { + return keccak256(abi.encode(_committee)); + } + /** * @notice Computes the index of the committee member that acts as proposer for a given slot * diff --git a/l1-contracts/test/Rollup.t.sol b/l1-contracts/test/Rollup.t.sol index 08a161e45dab..bfb626119019 100644 --- a/l1-contracts/test/Rollup.t.sol +++ b/l1-contracts/test/Rollup.t.sol @@ -6,7 +6,7 @@ import {DecoderBase} from "./base/DecoderBase.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; -import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {CommitteeAttestation} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {Math} from "@oz/utils/math/Math.sol"; import {Registry} from "@aztec/governance/Registry.sol"; @@ -208,7 +208,7 @@ contract RollupTest is RollupBase { txHashes: new bytes32[](0) }); 
vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidBlobHash.selector, blobHashes[0])); - rollup.propose(args, signatures, data.blobInputs); + rollup.propose(args, attestations, data.blobInputs); } function testInvalidBlobProof() public setUpFor("mixed_block_1") { @@ -236,7 +236,7 @@ contract RollupTest is RollupBase { txHashes: new bytes32[](0) }); vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__InvalidBlobProof.selector, blobHashes[0])); - rollup.propose(args, signatures, blobInput); + rollup.propose(args, attestations, blobInput); } function testRevertPrune() public setUpFor("mixed_block_1") { @@ -300,7 +300,7 @@ contract RollupTest is RollupBase { oracleInput: OracleInput(0), txHashes: txHashes }); - rollup.propose(args, signatures, data.blobInputs); + rollup.propose(args, attestations, data.blobInputs); } function testInvalidL2Fee() public setUpFor("mixed_block_1") { @@ -330,7 +330,7 @@ contract RollupTest is RollupBase { oracleInput: OracleInput(0), txHashes: txHashes }); - rollup.propose(args, signatures, data.blobInputs); + rollup.propose(args, attestations, data.blobInputs); } function testProvingFeeUpdates() public setUpFor("mixed_block_1") { @@ -437,7 +437,7 @@ contract RollupTest is RollupBase { oracleInput: OracleInput(0), txHashes: new bytes32[](0) }); - rollup.propose(args, signatures, data.blobInputs); + rollup.propose(args, attestations, data.blobInputs); assertEq(testERC20.balanceOf(data.decodedHeader.coinbase), 0, "invalid coinbase balance"); } @@ -703,7 +703,7 @@ contract RollupTest is RollupBase { oracleInput: OracleInput(0), txHashes: txHashes }); - rollup.propose(args, signatures, new bytes(144)); + rollup.propose(args, attestations, new bytes(144)); } function testSubmitProofNonExistentBlock() public setUpFor("empty_block_1") { diff --git a/l1-contracts/test/RollupGetters.t.sol b/l1-contracts/test/RollupGetters.t.sol index e249313c6165..d53858f359aa 100644 --- a/l1-contracts/test/RollupGetters.t.sol +++ 
b/l1-contracts/test/RollupGetters.t.sol @@ -65,9 +65,13 @@ contract RollupShouldBeGetters is ValidatorSelectionTestBase { address[] memory committee = rollup.getEpochCommittee(e); address[] memory committee2 = rollup.getCommitteeAt(t); address[] memory committee3 = rollup.getCurrentEpochCommittee(); + (bytes32 committeeCommitment, uint256 committeeSize) = rollup.getCommitteeCommitmentAt(t); + assertEq(committee.length, expectedSize, "invalid getEpochCommittee"); assertEq(committee2.length, expectedSize, "invalid getCommitteeAt"); assertEq(committee3.length, expectedSize, "invalid getCurrentEpochCommittee"); + assertEq(committeeSize, expectedSize, "invalid getCommitteeCommittmentAt size"); + assertNotEq(committeeCommitment, bytes32(0), "invalid committee commitment"); (, bytes32[] memory writes) = vm.accesses(address(rollup)); assertEq(writes.length, 0, "No writes should be done"); @@ -89,9 +93,13 @@ contract RollupShouldBeGetters is ValidatorSelectionTestBase { address[] memory committee = rollup.getEpochCommittee(e); address[] memory committee2 = rollup.getCommitteeAt(t); address[] memory committee3 = rollup.getCurrentEpochCommittee(); + (bytes32 committeeCommitment, uint256 committeeSize) = rollup.getCommitteeCommitmentAt(t); + assertEq(committee.length, expectedSize, "invalid getEpochCommittee"); assertEq(committee2.length, expectedSize, "invalid getCommitteeAt"); assertEq(committee3.length, expectedSize, "invalid getCurrentEpochCommittee"); + assertEq(committeeSize, expectedSize, "invalid getCommitteeCommittmentAt size"); + assertNotEq(committeeCommitment, bytes32(0), "invalid committee commitment"); (, bytes32[] memory writes) = vm.accesses(address(rollup)); assertEq(writes.length, 0, "No writes should be done"); diff --git a/l1-contracts/test/base/RollupBase.sol b/l1-contracts/test/base/RollupBase.sol index 1ed601d5595c..773d32a34171 100644 --- a/l1-contracts/test/base/RollupBase.sol +++ b/l1-contracts/test/base/RollupBase.sol @@ -19,7 +19,7 @@ import { } 
from "@aztec/core/libraries/TimeLib.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; import {ProposeArgs, OracleInput, ProposeLib} from "@aztec/core/libraries/rollup/ProposeLib.sol"; -import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {CommitteeAttestation} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {Inbox} from "@aztec/core/messagebridge/Inbox.sol"; import {Outbox} from "@aztec/core/messagebridge/Outbox.sol"; @@ -29,7 +29,7 @@ contract RollupBase is DecoderBase { Outbox internal outbox; MerkleTestUtil internal merkleTestUtil = new MerkleTestUtil(); - Signature[] internal signatures; + CommitteeAttestation[] internal attestations; mapping(uint256 => uint256) internal blockFees; @@ -189,7 +189,7 @@ contract RollupBase is DecoderBase { if (_revertMsg.length > 0) { vm.expectRevert(_revertMsg); } - rollup.propose(args, signatures, blobInputs); + rollup.propose(args, attestations, blobInputs); if (_revertMsg.length > 0) { return; diff --git a/l1-contracts/test/benchmark/happy.t.sol b/l1-contracts/test/benchmark/happy.t.sol index 80e8f43ba69c..93c4541146b3 100644 --- a/l1-contracts/test/benchmark/happy.t.sol +++ b/l1-contracts/test/benchmark/happy.t.sol @@ -8,7 +8,11 @@ import {stdStorage, StdStorage} from "forge-std/StdStorage.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; -import {SignatureLib, Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import { + SignatureLib, + Signature, + CommitteeAttestation +} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {Math} from "@oz/utils/math/Math.sol"; import {Registry} from "@aztec/governance/Registry.sol"; @@ -113,7 +117,7 @@ contract BenchmarkRollupTest is FeeModelTestPoints, DecoderBase { struct Block { ProposeArgs proposeArgs; bytes blobInputs; - Signature[] signatures; + CommitteeAttestation[] attestations; } 
DecoderBase.Full full = load("empty_block_1"); @@ -127,7 +131,7 @@ contract BenchmarkRollupTest is FeeModelTestPoints, DecoderBase { TestERC20 internal asset; FakeCanonical internal fakeCanonical; - Signature internal emptySignature; + CommitteeAttestation internal emptyAttestation; mapping(address attester => uint256 privateKey) internal attesterPrivateKeys; mapping(address proposer => address attester) internal proposerToAttester; @@ -186,10 +190,12 @@ contract BenchmarkRollupTest is FeeModelTestPoints, DecoderBase { benchmark(); } + /// forge-config: default.isolate = true function test_48_validators() public prepare(48) { benchmark(); } + /// forge-config: default.isolate = true function test_100_validators() public prepare(100) { benchmark(); } @@ -233,12 +239,12 @@ contract BenchmarkRollupTest is FeeModelTestPoints, DecoderBase { txHashes: txHashes }); - Signature[] memory signatures; + CommitteeAttestation[] memory attestations; { address[] memory validators = rollup.getEpochCommittee(rollup.getCurrentEpoch()); uint256 needed = validators.length * 2 / 3 + 1; - signatures = new Signature[](validators.length); + attestations = new CommitteeAttestation[](validators.length); bytes32 headerHash = HeaderLib.hash(proposeArgs.header); @@ -254,34 +260,49 @@ contract BenchmarkRollupTest is FeeModelTestPoints, DecoderBase { for (uint256 i = 0; i < validators.length; i++) { if (i < needed) { - signatures[i] = createSignature(validators[i], digest); + attestations[i] = createAttestation(validators[i], digest); } else { - signatures[i] = Signature({isEmpty: true, v: 0, r: 0, s: 0}); + attestations[i] = createEmptyAttestation(validators[i]); } } } - return - Block({proposeArgs: proposeArgs, blobInputs: full.block.blobInputs, signatures: signatures}); + return Block({ + proposeArgs: proposeArgs, + blobInputs: full.block.blobInputs, + attestations: attestations + }); } - function createSignature(address _signer, bytes32 _digest) + function createAttestation(address _signer, 
bytes32 _digest) internal view - returns (Signature memory) + returns (CommitteeAttestation memory) { uint256 privateKey = attesterPrivateKeys[_signer]; bytes32 digest = _digest.toEthSignedMessageHash(); (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digest); - return Signature({isEmpty: false, v: v, r: r, s: s}); + Signature memory signature = Signature({v: v, r: r, s: s}); + // Address can be zero for signed attestations + return CommitteeAttestation({addr: _signer, signature: signature}); + } + + // This is used for attestations that are not signed - we include their address to help reconstruct the committee commitment + function createEmptyAttestation(address _signer) + internal + pure + returns (CommitteeAttestation memory) + { + Signature memory emptySignature = Signature({v: 0, r: 0, s: 0}); + return CommitteeAttestation({addr: _signer, signature: emptySignature}); } function benchmark() public { // Do nothing for the first epoch - Slot nextSlot = Slot.wrap(EPOCH_DURATION + 1); - Epoch nextEpoch = Epoch.wrap(2); + Slot nextSlot = Slot.wrap(EPOCH_DURATION * 2 + 1); + Epoch nextEpoch = Epoch.wrap(3); // Loop through all of the L1 metadata for (uint256 i = 0; i < l1Metadata.length; i++) { @@ -306,7 +327,7 @@ contract BenchmarkRollupTest is FeeModelTestPoints, DecoderBase { targets[0] = address(rollup); bytes[] memory data = new bytes[](1); - data[0] = abi.encodeCall(IRollupCore.propose, (b.proposeArgs, b.signatures, b.blobInputs)); + data[0] = abi.encodeCall(IRollupCore.propose, (b.proposeArgs, b.attestations, b.blobInputs)); if (proposer == address(0)) { baseForwarder.forward(targets, data); diff --git a/l1-contracts/test/fees/FeeRollup.t.sol b/l1-contracts/test/fees/FeeRollup.t.sol index 402dd3bb3a8c..73d7c656922c 100644 --- a/l1-contracts/test/fees/FeeRollup.t.sol +++ b/l1-contracts/test/fees/FeeRollup.t.sol @@ -8,7 +8,11 @@ import {stdStorage, StdStorage} from "forge-std/StdStorage.sol"; import {DataStructures} from 
"@aztec/core/libraries/DataStructures.sol"; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; -import {SignatureLib, Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import { + SignatureLib, + Signature, + CommitteeAttestation +} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {Math} from "@oz/utils/math/Math.sol"; import {Registry} from "@aztec/governance/Registry.sol"; @@ -76,7 +80,7 @@ contract FeeRollupTest is FeeModelTestPoints, DecoderBase { bytes body; bytes blobInputs; bytes32[] txHashes; - Signature[] signatures; + CommitteeAttestation[] attestations; } DecoderBase.Full full = load("empty_block_1"); @@ -134,7 +138,7 @@ contract FeeRollupTest is FeeModelTestPoints, DecoderBase { bytes32 archiveRoot = bytes32(Constants.GENESIS_ARCHIVE_ROOT); bytes32[] memory txHashes = new bytes32[](0); - Signature[] memory signatures = new Signature[](0); + CommitteeAttestation[] memory attestations = new CommitteeAttestation[](0); bytes memory body = full.block.body; bytes memory header = full.block.header; @@ -176,7 +180,7 @@ contract FeeRollupTest is FeeModelTestPoints, DecoderBase { body: body, blobInputs: full.block.blobInputs, txHashes: txHashes, - signatures: signatures + attestations: attestations }); } @@ -201,7 +205,7 @@ contract FeeRollupTest is FeeModelTestPoints, DecoderBase { }), txHashes: b.txHashes }), - b.signatures, + b.attestations, b.blobInputs ); nextSlot = nextSlot + Slot.wrap(1); @@ -297,7 +301,7 @@ contract FeeRollupTest is FeeModelTestPoints, DecoderBase { }), txHashes: b.txHashes }), - b.signatures, + b.attestations, b.blobInputs ); diff --git a/l1-contracts/test/ignition.t.sol b/l1-contracts/test/ignition.t.sol index e0f788a100bd..0123d2cfec94 100644 --- a/l1-contracts/test/ignition.t.sol +++ b/l1-contracts/test/ignition.t.sol @@ -24,6 +24,7 @@ import {Errors} from "@aztec/core/libraries/Errors.sol"; import {RollupBase, IInstance} from "./base/RollupBase.sol"; import {IRollup, RollupConfigInput} from 
"@aztec/core/interfaces/IRollup.sol"; import {RollupBuilder} from "./builder/RollupBuilder.sol"; +import {TimeCheater} from "./staking/TimeCheater.sol"; // solhint-disable comprehensive-interface /** @@ -42,6 +43,7 @@ contract IgnitionTest is RollupBase { TestERC20 internal testERC20; FeeJuicePortal internal feeJuicePortal; RewardDistributor internal rewardDistributor; + TimeCheater internal timeCheater; uint256 internal SLOT_DURATION; uint256 internal EPOCH_DURATION; @@ -54,6 +56,7 @@ contract IgnitionTest is RollupBase { ); SLOT_DURATION = TestConstants.AZTEC_SLOT_DURATION; EPOCH_DURATION = TestConstants.AZTEC_EPOCH_DURATION; + timeCheater = new TimeCheater(address(this), block.timestamp, SLOT_DURATION, EPOCH_DURATION); } /** diff --git a/l1-contracts/test/validator-selection/ValidatorSelection.t.sol b/l1-contracts/test/validator-selection/ValidatorSelection.t.sol index 2b2a5cb3b844..87ad2cce7d39 100644 --- a/l1-contracts/test/validator-selection/ValidatorSelection.t.sol +++ b/l1-contracts/test/validator-selection/ValidatorSelection.t.sol @@ -4,6 +4,7 @@ pragma solidity >=0.8.27; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; +import {Signature, CommitteeAttestation} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; import {Timestamp, EpochLib, Epoch} from "@aztec/core/libraries/TimeLib.sol"; @@ -40,6 +41,21 @@ contract ValidatorSelectionTest is ValidatorSelectionTestBase { using MessageHashUtils for bytes32; using EpochLib for Epoch; + // Test Block Flags + struct TestFlags { + bool provideEmptyAttestations; + bool invalidProposer; + bool proposerNotProvided; + bool invalidCommitteeCommitment; + } + + TestFlags NO_FLAGS = TestFlags({ + provideEmptyAttestations: true, + invalidProposer: false, + proposerNotProvided: false, + invalidCommitteeCommitment: false + }); + function testInitialCommitteeMatch() public 
setup(4) progressEpochs(2) { address[] memory attesters = rollup.getAttesters(); address[] memory committee = rollup.getCurrentEpochCommittee(); @@ -135,6 +151,12 @@ contract ValidatorSelectionTest is ValidatorSelectionTestBase { } } + // NOTE: this must be run with --isolate as transient storage gets thrashed when working out the proposer. + // This also changes the committee which is calculated within each call. + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/14275): clear out transient storage used by the sample lib - we cannot afford to have a malicious proposer + // change the committee committment to something unpredictable. + + /// forge-config: default.isolate = true function testValidatorSetLargerThanCommittee(bool _insufficientSigs) public setup(100) @@ -143,7 +165,17 @@ contract ValidatorSelectionTest is ValidatorSelectionTestBase { assertGt(rollup.getAttesters().length, rollup.getTargetCommitteeSize(), "Not enough validators"); uint256 committeeSize = rollup.getTargetCommitteeSize() * 2 / 3 + (_insufficientSigs ? 0 : 1); - _testBlock("mixed_block_1", _insufficientSigs, committeeSize, false); + _testBlock( + "mixed_block_1", + _insufficientSigs, + committeeSize, + TestFlags({ + provideEmptyAttestations: true, + invalidProposer: false, + proposerNotProvided: false, + invalidCommitteeCommitment: false + }) + ); assertEq( rollup.getEpochCommittee(rollup.getCurrentEpoch()).length, @@ -153,8 +185,8 @@ contract ValidatorSelectionTest is ValidatorSelectionTestBase { } function testHappyPath() public setup(4) progressEpochs(2) { - _testBlock("mixed_block_1", false, 3, false); - _testBlock("mixed_block_2", false, 3, false); + _testBlock("mixed_block_1", false, 3, NO_FLAGS); + _testBlock("mixed_block_2", false, 3, NO_FLAGS); } function testNukeFromOrbit() public setup(4) progressEpochs(2) { @@ -163,8 +195,8 @@ contract ValidatorSelectionTest is ValidatorSelectionTestBase { // got finalised. 
// This is LIKELY, not the action you really want to take, you want to slash // the people actually attesting, etc, but for simplicity we can do this as showcase. - _testBlock("mixed_block_1", false, 3, false); - _testBlock("mixed_block_2", false, 3, false); + _testBlock("mixed_block_1", false, 3, NO_FLAGS); + _testBlock("mixed_block_2", false, 3, NO_FLAGS); address[] memory attesters = rollup.getAttesters(); uint256[] memory stakes = new uint256[](attesters.length); @@ -195,24 +227,68 @@ contract ValidatorSelectionTest is ValidatorSelectionTestBase { } function testInvalidProposer() public setup(4) progressEpochs(2) { - _testBlock("mixed_block_1", true, 3, true); + _testBlock( + "mixed_block_1", + true, + 3, + TestFlags({ + invalidProposer: true, + provideEmptyAttestations: true, + proposerNotProvided: false, + invalidCommitteeCommitment: false + }) + ); } - function testInsufficientSigs() public setup(4) progressEpochs(2) { - _testBlock("mixed_block_1", true, 2, false); + function testProposerNotProvided() public setup(4) progressEpochs(2) { + _testBlock( + "mixed_block_1", + true, + 3, + TestFlags({ + invalidProposer: false, + provideEmptyAttestations: true, + proposerNotProvided: true, + invalidCommitteeCommitment: false + }) + ); + } + + function testInvalidCommitteeCommitment() public setup(4) progressEpochs(2) { + _testBlock( + "mixed_block_1", + true, + 3, + TestFlags({ + invalidProposer: false, + provideEmptyAttestations: true, + proposerNotProvided: false, + invalidCommitteeCommitment: true + }) + ); } function testInsufficientSigsMove() public setup(4) progressEpochs(2) { rollup.getGSE().addRollup(address(0xdead)); assertEq(rollup.getCurrentEpochCommittee().length, 4); - _testBlock("mixed_block_1", true, 0, false); + _testBlock( + "mixed_block_1", + true, + 0, + TestFlags({ + provideEmptyAttestations: false, + invalidProposer: false, + proposerNotProvided: false, + invalidCommitteeCommitment: false + }) + ); } function _testBlock( string memory _name, 
bool _expectRevert, uint256 _signatureCount, - bool _invalidProposer + TestFlags memory _flags ) internal { DecoderBase.Full memory full = load(_name); bytes memory header = full.block.header; @@ -252,9 +328,10 @@ contract ValidatorSelectionTest is ValidatorSelectionTestBase { address[] memory validators = rollup.getEpochCommittee(rollup.getCurrentEpoch()); ree.needed = validators.length * 2 / 3 + 1; - Signature[] memory signatures = new Signature[](_signatureCount); - - ProposePayload memory proposePayload = ProposePayload({ + // Pad out with empty (missing signature) attestations to make the committee commitment match + ree.provideEmptyAttestations = _flags.provideEmptyAttestations || !_expectRevert; + ree.attestationsCount = ree.provideEmptyAttestations ? validators.length : _signatureCount; + ree.proposePayload = ProposePayload({ archive: args.archive, stateReference: args.stateReference, oracleInput: args.oracleInput, @@ -262,9 +339,18 @@ contract ValidatorSelectionTest is ValidatorSelectionTestBase { txHashes: args.txHashes }); - bytes32 digest = ProposeLib.digest(proposePayload); + CommitteeAttestation[] memory attestations = new CommitteeAttestation[](ree.attestationsCount); + + bytes32 digest = ProposeLib.digest(ree.proposePayload); for (uint256 i = 0; i < _signatureCount; i++) { - signatures[i] = createSignature(validators[i], digest); + attestations[i] = createAttestation(validators[i], digest); + } + + // We must include empty attestations to make the committee commitment match + if (ree.provideEmptyAttestations) { + for (uint256 i = _signatureCount; i < validators.length; i++) { + attestations[i] = createEmptyAttestation(validators[i]); + } } if (_expectRevert) { @@ -272,17 +358,15 @@ contract ValidatorSelectionTest is ValidatorSelectionTestBase { if (_signatureCount < ree.needed) { vm.expectRevert( abi.encodeWithSelector( - Errors.ValidatorSelection__InsufficientAttestationsProvided.selector, + 
Errors.ValidatorSelection__InsufficientAttestations.selector, ree.needed, _signatureCount ) ); } - // @todo Handle SignatureLib__InvalidSignature case - // @todo Handle ValidatorSelection__InsufficientAttestations case } - if (_expectRevert && _invalidProposer) { + if (_expectRevert && _flags.invalidProposer) { address realProposer = ree.proposer; ree.proposer = address(uint160(uint256(keccak256(abi.encode("invalid", ree.proposer))))); vm.expectRevert( @@ -293,17 +377,62 @@ contract ValidatorSelectionTest is ValidatorSelectionTestBase { ree.shouldRevert = true; } + // Set all attestations, including the propser's addr to 0 + if (_flags.proposerNotProvided) { + for (uint256 i = 0; i < attestations.length; ++i) { + attestations[i].addr = address(0); + } + + vm.expectRevert( + abi.encodeWithSelector( + Errors.ValidatorSelection__InvalidProposer.selector, address(0), ree.proposer + ) + ); + } + + if (_flags.invalidCommitteeCommitment) { + bytes32 correctCommitteeCommitment = keccak256(abi.encode(validators)); + + // Change the last element in the committee to a random address + address[] memory incorrectCommittee = validators; + uint256 invalidAttesterKey = uint256(keccak256(abi.encode("invalid", block.timestamp))); + address invalidAttester = vm.addr(invalidAttesterKey); + attesterPrivateKeys[invalidAttester] = invalidAttesterKey; + + incorrectCommittee[validators.length - 2] = invalidAttester; + attestations[validators.length - 2] = createAttestation(invalidAttester, digest); + + bytes32 incorrectCommitteeCommitment = keccak256(abi.encode(incorrectCommittee)); + + vm.expectRevert( + abi.encodeWithSelector( + Errors.ValidatorSelection__InvalidCommitteeCommitment.selector, + incorrectCommitteeCommitment, + correctCommitteeCommitment + ) + ); + } + emit log("Time to propose"); vm.prank(ree.proposer); - rollup.propose(args, signatures, full.block.blobInputs); - } else { - Signature[] memory signatures = new Signature[](0); + rollup.propose(args, attestations, 
full.block.blobInputs); + if (ree.shouldRevert) { + return; + } + } else { + CommitteeAttestation[] memory attestations = new CommitteeAttestation[](0); if (_expectRevert) { - vm.expectRevert(Errors.Staking__InvalidProposer.selector); + vm.expectRevert( + abi.encodeWithSelector( + Errors.ValidatorSelection__InvalidAttestationsLength.selector, + rollup.getCurrentEpochCommittee().length, + 0 + ) + ); ree.shouldRevert = true; } - rollup.propose(args, signatures, full.block.blobInputs); + rollup.propose(args, attestations, full.block.blobInputs); } assertEq(_expectRevert, ree.shouldRevert, "Does not match revert expectation"); @@ -362,4 +491,27 @@ contract ValidatorSelectionTest is ValidatorSelectionTestBase { ); } } + + function createAttestation(address _signer, bytes32 _digest) + internal + view + returns (CommitteeAttestation memory) + { + uint256 privateKey = attesterPrivateKeys[_signer]; + + bytes32 digest = _digest.toEthSignedMessageHash(); + (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digest); + + Signature memory signature = Signature({v: v, r: r, s: s}); + return CommitteeAttestation({addr: _signer, signature: signature}); + } + + function createEmptyAttestation(address _signer) + internal + pure + returns (CommitteeAttestation memory) + { + Signature memory emptySignature = Signature({v: 0, r: 0, s: 0}); + return CommitteeAttestation({addr: _signer, signature: emptySignature}); + } } diff --git a/l1-contracts/test/validator-selection/ValidatorSelectionBase.sol b/l1-contracts/test/validator-selection/ValidatorSelectionBase.sol index 134169f1bd21..1f7b875ab605 100644 --- a/l1-contracts/test/validator-selection/ValidatorSelectionBase.sol +++ b/l1-contracts/test/validator-selection/ValidatorSelectionBase.sol @@ -20,6 +20,7 @@ import {RewardDistributor} from "@aztec/governance/RewardDistributor.sol"; import {SlashFactory} from "@aztec/periphery/SlashFactory.sol"; import {Slasher} from "@aztec/core/slashing/Slasher.sol"; import {IValidatorSelection} from 
"@aztec/core/interfaces/IValidatorSelection.sol"; +import {ProposePayload} from "@aztec/core/libraries/rollup/ProposeLib.sol"; import {MultiAdder, CheatDepositArgs} from "@aztec/mock/MultiAdder.sol"; import {RollupBuilder} from "../builder/RollupBuilder.sol"; import {TimeCheater} from "../staking/TimeCheater.sol"; @@ -37,6 +38,9 @@ contract ValidatorSelectionTestBase is DecoderBase { uint256 needed; address proposer; bool shouldRevert; + bool provideEmptyAttestations; + uint256 attestationsCount; + ProposePayload proposePayload; } SlashFactory internal slashFactory; @@ -122,17 +126,4 @@ contract ValidatorSelectionTestBase is DecoderBase { return CheatDepositArgs({attester: attester, proposer: proposer, withdrawer: address(this)}); } - - function createSignature(address _signer, bytes32 _digest) - internal - view - returns (Signature memory) - { - uint256 privateKey = attesterPrivateKeys[_signer]; - - bytes32 digest = _digest.toEthSignedMessageHash(); - (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digest); - - return Signature({isEmpty: false, v: v, r: r, s: s}); - } } diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts index bccceb9f01f7..29ce65461217 100644 --- a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts +++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts @@ -8,12 +8,11 @@ import { makeTuple } from '@aztec/foundation/array'; import { Buffer16, Buffer32 } from '@aztec/foundation/buffer'; import { times, timesParallel } from '@aztec/foundation/collection'; import { randomInt } from '@aztec/foundation/crypto'; -import { Signature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; import { toArray } from '@aztec/foundation/iterable'; import { sleep } from '@aztec/foundation/sleep'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { L2Block, wrapInBlock } from 
'@aztec/stdlib/block'; +import { CommitteeAttestation, L2Block, wrapInBlock } from '@aztec/stdlib/block'; import { type ContractClassPublic, type ContractInstanceWithAddress, @@ -64,7 +63,7 @@ export function describeArchiverDataStore( blockHash: `0x${l1BlockNumber}`, timestamp: BigInt(l1BlockNumber * 1000), }, - signatures: times(3, Signature.random), + attestations: times(3, CommitteeAttestation.random), }); const expectBlocksEqual = (actual: PublishedL2Block[], expected: PublishedL2Block[]) => { @@ -74,7 +73,7 @@ export function describeArchiverDataStore( const actualBlock = actual[i]; expect(actualBlock.l1).toEqual(expectedBlock.l1); expect(actualBlock.block.equals(expectedBlock.block)).toBe(true); - expect(actualBlock.signatures.every((s, i) => s.equals(expectedBlock.signatures[i]))).toBe(true); + expect(actualBlock.attestations.every((a, i) => a.equals(expectedBlock.attestations[i]))).toBe(true); } }; @@ -758,7 +757,7 @@ export function describeArchiverDataStore( return { block: block, - signatures: times(3, Signature.random), + attestations: times(3, CommitteeAttestation.random), l1: { blockNumber: BigInt(blockNumber), blockHash: `0x${blockNumber}`, timestamp: BigInt(blockNumber) }, }; }; @@ -876,7 +875,7 @@ export function describeArchiverDataStore( blocks = await timesParallel(numBlocks, async (index: number) => ({ block: await L2Block.random(index + 1, txsPerBlock, numPublicFunctionCalls, numPublicLogs), l1: { blockNumber: BigInt(index), blockHash: `0x${index}`, timestamp: BigInt(index) }, - signatures: times(3, Signature.random), + attestations: times(3, CommitteeAttestation.random), })); await store.addBlocks(blocks); diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts index 7ef9f1679d23..4163eddf3c57 100644 --- a/yarn-project/archiver/src/archiver/data_retrieval.ts +++ b/yarn-project/archiver/src/archiver/data_retrieval.ts @@ -1,14 +1,18 @@ import { Blob, BlobDeserializationError } 
from '@aztec/blob-lib'; import type { BlobSinkClientInterface } from '@aztec/blob-sink/client'; -import type { EpochProofPublicInputArgs, ViemClient, ViemPublicClient } from '@aztec/ethereum'; +import type { + EpochProofPublicInputArgs, + ViemClient, + ViemCommitteeAttestation, + ViemPublicClient, +} from '@aztec/ethereum'; import { asyncPool } from '@aztec/foundation/async-pool'; import { Buffer16, Buffer32 } from '@aztec/foundation/buffer'; import type { EthAddress } from '@aztec/foundation/eth-address'; -import { Signature, type ViemSignature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { ForwarderAbi, type InboxAbi, RollupAbi } from '@aztec/l1-artifacts'; -import { Body, L2Block } from '@aztec/stdlib/block'; +import { Body, CommitteeAttestation, L2Block } from '@aztec/stdlib/block'; import { Proof } from '@aztec/stdlib/proofs'; import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees'; import { BlockHeader, GlobalVariables, ProposedBlockHeader, StateReference } from '@aztec/stdlib/tx'; @@ -36,7 +40,7 @@ export type RetrievedL2Block = { l1: L1PublishedData; chainId: Fr; version: Fr; - signatures: Signature[]; + attestations: CommitteeAttestation[]; }; export function retrievedBlockToPublishedL2Block(retrievedBlock: RetrievedL2Block): PublishedL2Block { @@ -49,7 +53,7 @@ export function retrievedBlockToPublishedL2Block(retrievedBlock: RetrievedL2Bloc l1, chainId, version, - signatures, + attestations, } = retrievedBlock; const archive = new AppendOnlyTreeSnapshot( @@ -82,7 +86,7 @@ export function retrievedBlockToPublishedL2Block(retrievedBlock: RetrievedL2Bloc return { block, l1, - signatures, + attestations, }; } @@ -196,7 +200,7 @@ async function processL2BlockProposedLogs( l1BlockNumber: log.blockNumber, l2BlockNumber, archive: archive.toString(), - signatures: block.signatures.map(signature => signature.toString()), + attestations: 
block.attestations, }); } else { logger.warn(`Ignoring L2 block ${l2BlockNumber} due to archive root mismatch`, { @@ -295,7 +299,7 @@ async function getBlockFromRollupTx( throw new Error(`Unexpected rollup method called ${rollupFunctionName}`); } - const [decodedArgs, signatures, _blobInput] = rollupArgs! as readonly [ + const [decodedArgs, attestations, _blobInput] = rollupArgs! as readonly [ { header: Hex; archive: Hex; @@ -306,7 +310,7 @@ async function getBlockFromRollupTx( }; txHashes: Hex[]; }, - ViemSignature[], + ViemCommitteeAttestation[], Hex, ]; @@ -341,7 +345,7 @@ async function getBlockFromRollupTx( stateReference, header, body, - signatures: signatures.map(Signature.fromViemSignature), + attestations: attestations.map(CommitteeAttestation.fromViem), }; } diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts index d8ce6369376e..94956aad6440 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts @@ -1,10 +1,9 @@ import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; -import { Signature } from '@aztec/foundation/eth-signature'; import { toArray } from '@aztec/foundation/iterable'; import { createLogger } from '@aztec/foundation/log'; import type { AztecAsyncKVStore, AztecAsyncMap, AztecAsyncSingleton, Range } from '@aztec/kv-store'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { Body, L2Block, L2BlockHash } from '@aztec/stdlib/block'; +import { Body, CommitteeAttestation, L2Block, L2BlockHash } from '@aztec/stdlib/block'; import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees'; import { BlockHeader, type IndexedTxEffect, TxHash, TxReceipt } from '@aztec/stdlib/tx'; @@ -19,7 +18,7 @@ type BlockStorage = { header: Buffer; archive: Buffer; l1: L1PublishedData; - signatures: Buffer[]; + attestations: Buffer[]; }; /** @@ -90,7 
+89,7 @@ export class BlockStore { header: block.block.header.toBuffer(), archive: block.block.archive.toBuffer(), l1: block.l1, - signatures: block.signatures.map(sig => sig.toBuffer()), + attestations: block.attestations.map(attestation => attestation.toBuffer()), }); for (let i = 0; i < block.block.body.txEffects.length; i++) { @@ -221,8 +220,8 @@ export class BlockStore { } with hash ${await block.hash()})`, ); } - const signatures = blockStorage.signatures.map(Signature.fromBuffer); - return { block, l1: blockStorage.l1, signatures }; + const attestations = blockStorage.attestations.map(CommitteeAttestation.fromBuffer); + return { block, l1: blockStorage.l1, attestations }; } /** diff --git a/yarn-project/archiver/src/test/mock_l2_block_source.ts b/yarn-project/archiver/src/test/mock_l2_block_source.ts index 878fb11efd07..601fb464e24d 100644 --- a/yarn-project/archiver/src/test/mock_l2_block_source.ts +++ b/yarn-project/archiver/src/test/mock_l2_block_source.ts @@ -112,7 +112,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { blockHash: Buffer32.random().toString(), timestamp: BigInt(block.number), }, - signatures: [], + attestations: [], })); } diff --git a/yarn-project/aztec.js/src/test/anvil_test_watcher.ts b/yarn-project/aztec.js/src/test/anvil_test_watcher.ts index 7203b6811ccd..48d6f23e106a 100644 --- a/yarn-project/aztec.js/src/test/anvil_test_watcher.ts +++ b/yarn-project/aztec.js/src/test/anvil_test_watcher.ts @@ -22,9 +22,10 @@ export class AnvilTestWatcher { private rollup: GetContractReturnType; private rollupCheatCodes: RollupCheatCodes; + private l2SlotDuration!: bigint; private filledRunningPromise?: RunningPromise; - private mineIfOutdatedPromise?: RunningPromise; + private syncDateProviderPromise?: RunningPromise; private markingAsProvenRunningPromise?: RunningPromise; private logger: Logger = createLogger(`aztecjs:utils:watcher`); @@ -63,6 +64,9 @@ export class AnvilTestWatcher { throw new Error('Watcher 
already watching for filled slot'); } + const config = await this.rollupCheatCodes.getConfig(); + this.l2SlotDuration = config.slotDuration; + // If auto mining is not supported (e.g., we are on a real network), then we // will simple do nothing. But if on an anvil or the like, this make sure that // the sandbox and tests don't break because time is frozen and we never get to @@ -72,8 +76,8 @@ export class AnvilTestWatcher { if (isAutoMining) { this.filledRunningPromise = new RunningPromise(() => this.warpTimeIfNeeded(), this.logger, 200); this.filledRunningPromise.start(); - this.mineIfOutdatedPromise = new RunningPromise(() => this.mineIfOutdated(), this.logger, 200); - this.mineIfOutdatedPromise.start(); + this.syncDateProviderPromise = new RunningPromise(() => this.syncDateProviderToL1IfBehind(), this.logger, 200); + this.syncDateProviderPromise.start(); this.markingAsProvenRunningPromise = new RunningPromise(() => this.markAsProven(), this.logger, 200); this.markingAsProvenRunningPromise.start(); this.logger.info(`Watcher started for rollup at ${this.rollup.address}`); @@ -84,13 +88,13 @@ export class AnvilTestWatcher { async stop() { await this.filledRunningPromise?.stop(); - await this.mineIfOutdatedPromise?.stop(); + await this.syncDateProviderPromise?.stop(); await this.markingAsProvenRunningPromise?.stop(); } async trigger() { await this.filledRunningPromise?.trigger(); - await this.mineIfOutdatedPromise?.trigger(); + await this.syncDateProviderPromise?.trigger(); await this.markingAsProvenRunningPromise?.trigger(); } @@ -101,7 +105,7 @@ export class AnvilTestWatcher { await this.rollupCheatCodes.markAsProven(); } - async mineIfOutdated() { + async syncDateProviderToL1IfBehind() { // this doesn't apply to the sandbox, because we don't have a date provider in the sandbox if (!this.dateProvider) { return; @@ -109,15 +113,12 @@ export class AnvilTestWatcher { const l1Time = (await this.cheatcodes.timestamp()) * 1000; const wallTime = this.dateProvider.now(); 
- - // If the wall time is more than 24 seconds away from L1 time, - // mine a block and sync the clocks - if (Math.abs(wallTime - l1Time) > 24 * 1000) { - this.logger.warn(`Wall time is more than 24 seconds away from L1 time, mining a block and syncing clocks`); - await this.cheatcodes.evmMine(); - const newL1Time = await this.cheatcodes.timestamp(); - this.logger.info(`New L1 time: ${newL1Time}`); - this.dateProvider.setTime(newL1Time * 1000); + if (l1Time > wallTime) { + this.logger.warn(`L1 is ahead of wall time. Syncing wall time to L1 time`); + this.dateProvider.setTime(l1Time); + } else if (l1Time + Number(this.l2SlotDuration) * 1000 < wallTime) { + this.logger.warn(`L1 is more than 1 L2 slot behind wall time. Warping to wall time`); + await this.cheatcodes.warp(Math.ceil(wallTime / 1000)); } } diff --git a/yarn-project/aztec/src/sandbox/sandbox.ts b/yarn-project/aztec/src/sandbox/sandbox.ts index d6bfa3d3f582..50f345331d88 100644 --- a/yarn-project/aztec/src/sandbox/sandbox.ts +++ b/yarn-project/aztec/src/sandbox/sandbox.ts @@ -15,6 +15,7 @@ import { } from '@aztec/ethereum'; import { Fr } from '@aztec/foundation/fields'; import { type LogFn, createLogger } from '@aztec/foundation/log'; +import { DateProvider, TestDateProvider } from '@aztec/foundation/timer'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; import { protocolContractTreeRoot } from '@aztec/protocol-contracts'; import { type PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe/server'; @@ -141,6 +142,7 @@ export async function createSandbox(config: Partial = {}, userLog const { genesisArchiveRoot, prefilledPublicData, fundingNeeded } = await getGenesisValues(fundedAddresses); let watcher: AnvilTestWatcher | undefined = undefined; + const dateProvider = new TestDateProvider(); if (!aztecNodeConfig.p2pEnabled) { const l1ContractAddresses = await deployContractsToL1(aztecNodeConfig, hdAccount, undefined, { assumeProvenThroughBlockNumber: 
Number.MAX_SAFE_INTEGER, @@ -159,7 +161,12 @@ export async function createSandbox(config: Partial = {}, userLog transport: fallback([httpViemTransport(l1RpcUrl)]) as any, }); - watcher = new AnvilTestWatcher(new EthCheatCodes([l1RpcUrl]), l1ContractAddresses.rollupAddress, publicClient); + watcher = new AnvilTestWatcher( + new EthCheatCodes([l1RpcUrl]), + l1ContractAddresses.rollupAddress, + publicClient, + dateProvider, + ); watcher.setIsSandbox(true); await watcher.start(); } @@ -167,7 +174,11 @@ export async function createSandbox(config: Partial = {}, userLog const telemetry = initTelemetryClient(getTelemetryClientConfig()); // Create a local blob sink client inside the sandbox, no http connectivity const blobSinkClient = createBlobSinkClient(); - const node = await createAztecNode(aztecNodeConfig, { telemetry, blobSinkClient }, { prefilledPublicData }); + const node = await createAztecNode( + aztecNodeConfig, + { telemetry, blobSinkClient, dateProvider }, + { prefilledPublicData }, + ); const pxeServiceConfig = { proverEnabled: aztecNodeConfig.realProofs }; const pxe = await createAztecPXE(node, pxeServiceConfig); @@ -200,7 +211,7 @@ export async function createSandbox(config: Partial = {}, userLog */ export async function createAztecNode( config: Partial = {}, - deps: { telemetry?: TelemetryClient; blobSinkClient?: BlobSinkClientInterface } = {}, + deps: { telemetry?: TelemetryClient; blobSinkClient?: BlobSinkClientInterface; dateProvider?: DateProvider } = {}, options: { prefilledPublicData?: PublicDataTreeLeaf[] } = {}, ) { // TODO(#12272): will clean this up. This is criminal. 
diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index 7b76a951441b..4c8145259229 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -143,7 +143,7 @@ describe('L1Publisher integration', () => { getPublishedBlocks(from, limit, _proven) { return Promise.resolve( blocks.slice(from - 1, from - 1 + limit).map(block => ({ - signatures: [], + attestations: [], block, // Use L2 block number and hash for faking the L1 info l1: { diff --git a/yarn-project/end-to-end/src/e2e_block_building.test.ts b/yarn-project/end-to-end/src/e2e_block_building.test.ts index 080436c5b80d..d79394a79886 100644 --- a/yarn-project/end-to-end/src/e2e_block_building.test.ts +++ b/yarn-project/end-to-end/src/e2e_block_building.test.ts @@ -85,6 +85,47 @@ describe('e2e_block_building', () => { afterEach(() => aztecNodeAdmin.setConfig({ minTxsPerBlock: 1 })); afterAll(() => teardown()); + it('processes txs until hitting timetable', async () => { + // We send enough txs so they are spread across multiple blocks, but not + // so many so that we don't end up hitting a reorg or timing out the tx wait(). + const TX_COUNT = 16; + + const ownerAddress = owner.getCompleteAddress().address; + const contract = await StatefulTestContract.deploy(owner, ownerAddress, ownerAddress, 1).send().deployed(); + logger.info(`Deployed stateful test contract at ${contract.address}`); + + // We have to set minTxsPerBlock to 1 or we could end with dangling txs. + // We also set enforceTimetable so the deadline makes sense, otherwise we may be starting the + // block too late into the slot, and start processing when the deadline has already passed. 
+ logger.info(`Updating aztec node config`); + await aztecNodeAdmin.setConfig({ minTxsPerBlock: 0, maxTxsPerBlock: TX_COUNT, enforceTimeTable: true }); + + // We tweak the sequencer so it uses a fake simulator that adds a delay to every public tx. + const archiver = (aztecNode as AztecNodeService).getContractDataSource(); + sequencer.sequencer.publicProcessorFactory = new TestPublicProcessorFactory(archiver, dateProvider!); + + // We also cheat the sequencer's timetable so it allocates little time to processing. + // This will leave the sequencer with just a few seconds to build the block, so it shouldn't + // be able to squeeze in more than a few txs in each. This is sensitive to the time it takes + // to pick up and validate the txs, so we may need to bump it to work on CI. + jest + .spyOn(sequencer.sequencer.timetable, 'getBlockProposalExecTimeEnd') + .mockImplementation((secondsIntoSlot: number) => secondsIntoSlot + 1); + + // Flood the mempool with TX_COUNT simultaneous txs + const methods = times(TX_COUNT, i => contract.methods.increment_public_value(ownerAddress, i)); + const provenTxs = await asyncMap(methods, method => method.prove()); + logger.info(`Sending ${TX_COUNT} txs to the node`); + const txs = await Promise.all(provenTxs.map(tx => tx.send())); + logger.info(`All ${TX_COUNT} txs have been sent`, { txs: await Promise.all(txs.map(tx => tx.getTxHash())) }); + + // Await txs to be mined and assert they are mined across multiple different blocks. + const receipts = await Promise.all(txs.map(tx => tx.wait())); + const blockNumbers = receipts.map(r => r.blockNumber!).sort((a, b) => a - b); + logger.info(`Txs mined on blocks: ${unique(blockNumbers)}`); + expect(blockNumbers.at(-1)! 
- blockNumbers[0]).toBeGreaterThan(1); + }); + it('assembles a block with multiple txs', async () => { // Assemble N contract deployment txs // We need to create them sequentially since we cannot have parallel calls to a circuit @@ -189,47 +230,6 @@ describe('e2e_block_building', () => { logger.info(`Txs sent`); }); - it('processes txs until hitting timetable', async () => { - // We send enough txs so they are spread across multiple blocks, but not - // so many so that we don't end up hitting a reorg or timing out the tx wait(). - const TX_COUNT = 16; - - const ownerAddress = owner.getCompleteAddress().address; - const contract = await StatefulTestContract.deploy(owner, ownerAddress, ownerAddress, 1).send().deployed(); - logger.info(`Deployed stateful test contract at ${contract.address}`); - - // We have to set minTxsPerBlock to 1 or we could end with dangling txs. - // We also set enforceTimetable so the deadline makes sense, otherwise we may be starting the - // block too late into the slot, and start processing when the deadline has already passed. - logger.info(`Updating aztec node config`); - await aztecNodeAdmin.setConfig({ minTxsPerBlock: 0, maxTxsPerBlock: TX_COUNT, enforceTimeTable: true }); - - // We tweak the sequencer so it uses a fake simulator that adds a delay to every public tx. - const archiver = (aztecNode as AztecNodeService).getContractDataSource(); - sequencer.sequencer.publicProcessorFactory = new TestPublicProcessorFactory(archiver, dateProvider!); - - // We also cheat the sequencer's timetable so it allocates little time to processing. - // This will leave the sequencer with just a few seconds to build the block, so it shouldn't - // be able to squeeze in more than a few txs in each. This is sensitive to the time it takes - // to pick up and validate the txs, so we may need to bump it to work on CI. 
- jest - .spyOn(sequencer.sequencer.timetable, 'getBlockProposalExecTimeEnd') - .mockImplementation((secondsIntoSlot: number) => secondsIntoSlot + 1); - - // Flood the mempool with TX_COUNT simultaneous txs - const methods = times(TX_COUNT, i => contract.methods.increment_public_value(ownerAddress, i)); - const provenTxs = await asyncMap(methods, method => method.prove()); - logger.info(`Sending ${TX_COUNT} txs to the node`); - const txs = await Promise.all(provenTxs.map(tx => tx.send())); - logger.info(`All ${TX_COUNT} txs have been sent`, { txs: await Promise.all(txs.map(tx => tx.getTxHash())) }); - - // Await txs to be mined and assert they are mined across multiple different blocks. - const receipts = await Promise.all(txs.map(tx => tx.wait())); - const blockNumbers = receipts.map(r => r.blockNumber!).sort((a, b) => a - b); - logger.info(`Txs mined on blocks: ${unique(blockNumbers)}`); - expect(blockNumbers.at(-1)! - blockNumbers[0]).toBeGreaterThan(1); - }); - it.skip('can call public function from different tx in same block as deployed', async () => { // Ensure both txs will land on the same block await aztecNodeAdmin.setConfig({ minTxsPerBlock: 2 }); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_empty_blocks.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_empty_blocks.test.ts index 87c572f3461d..7ecc905ea1a5 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_empty_blocks.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_empty_blocks.test.ts @@ -54,7 +54,10 @@ describe('e2e_epochs/epochs_empty_blocks', () => { await test.waitUntilEpochStarts(epochNumber + 1); const epochTargetBlockNumber = Number(await rollup.getBlockNumber()); logger.info(`Epoch ${epochNumber} ended with PENDING block number ${epochTargetBlockNumber}`); - await test.waitUntilL2BlockNumber(epochTargetBlockNumber); + await test.waitUntilL2BlockNumber( + epochTargetBlockNumber, + test.L2_SLOT_DURATION_IN_S * (epochTargetBlockNumber + 4), + ); 
provenBlockNumber = epochTargetBlockNumber; logger.info( `Reached PENDING L2 block ${epochTargetBlockNumber}, proving should now start, waiting for PROVEN block to reach ${provenBlockNumber}`, diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.test.ts index cfd7ab6d6569..c59453a3b38d 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_l1_reorgs.test.ts @@ -157,7 +157,7 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { it('prunes L2 blocks from pending chain removed from L1 due to an L1 reorg', async () => { // Wait until L2_BLOCK_NUMBER is mined and node synced, and stop the sequencer const L2_BLOCK_NUMBER = 3; - await test.waitUntilL2BlockNumber(L2_BLOCK_NUMBER, 60); + await test.waitUntilL2BlockNumber(L2_BLOCK_NUMBER, L2_SLOT_DURATION_IN_S * (L2_BLOCK_NUMBER + 4)); expect(monitor.l2BlockNumber).toEqual(L2_BLOCK_NUMBER); const l1BlockNumber = monitor.l1BlockNumber; await retryUntil(() => node.getBlockNumber().then(b => b === L2_BLOCK_NUMBER), 'node sync', 10, 0.1); @@ -178,7 +178,7 @@ describe('e2e_epochs/epochs_l1_reorgs', () => { it('sees new blocks added in an L1 reorg', async () => { // Wait until the block *before* L2_BLOCK_NUMBER is mined and node synced const L2_BLOCK_NUMBER = 3; - await test.waitUntilL2BlockNumber(L2_BLOCK_NUMBER - 1, 60); + await test.waitUntilL2BlockNumber(L2_BLOCK_NUMBER - 1, L2_SLOT_DURATION_IN_S * (L2_BLOCK_NUMBER + 4)); expect(monitor.l2BlockNumber).toEqual(L2_BLOCK_NUMBER - 1); const l1BlockNumber = monitor.l1BlockNumber; await retryUntil(() => node.getBlockNumber().then(b => b === L2_BLOCK_NUMBER - 1), 'node sync', 5, 0.1); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_manual_rollback.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_manual_rollback.test.ts index 8f88139b3a18..297a4b61b127 100644 --- 
a/yarn-project/end-to-end/src/e2e_epochs/epochs_manual_rollback.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_manual_rollback.test.ts @@ -35,7 +35,7 @@ describe('e2e_epochs/manual_rollback', () => { it('manually rolls back', async () => { logger.info(`Starting manual rollback test to unfinalized block`); await context.sequencer?.updateSequencerConfig({ minTxsPerBlock: 0 }); - await test.waitUntilL2BlockNumber(4, 60); + await test.waitUntilL2BlockNumber(4, test.L2_SLOT_DURATION_IN_S * 6); await retryUntil(async () => await node.getBlockNumber().then(b => b >= 4), 'sync to 4', 10, 0.1); logger.info(`Synced to block 4. Pausing syncing and rolling back the chain.`); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_partial_proof.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_partial_proof.test.ts index d98ee5befabe..7ff01a993cf9 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_partial_proof.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_partial_proof.test.ts @@ -24,7 +24,7 @@ describe('e2e_epochs/epochs_partial_proof', () => { }); it('submits partial proofs when instructed manually', async () => { - await test.waitUntilL2BlockNumber(4, 60); + await test.waitUntilL2BlockNumber(4, test.L2_SLOT_DURATION_IN_S * 6); logger.info(`Kicking off partial proof`); await test.context.proverNode!.startProof(0); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.test.ts index ce10e1d9d7e6..547263fd29f5 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_fails.test.ts @@ -1,8 +1,7 @@ import { type Logger, getTimestampRangeForEpoch, sleep } from '@aztec/aztec.js'; import type { ViemClient } from '@aztec/ethereum'; import { RollupContract } from '@aztec/ethereum/contracts'; -import { ChainMonitor } from '@aztec/ethereum/test'; -import { type Delayer, 
waitUntilL1Timestamp } from '@aztec/ethereum/test'; +import { ChainMonitor, type Delayer, waitUntilL1Timestamp } from '@aztec/ethereum/test'; import { promiseWithResolvers } from '@aztec/foundation/promise'; import type { TestProverNode } from '@aztec/prover-node/test'; import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; @@ -59,7 +58,10 @@ describe('e2e_epochs/epochs_proof_fails', () => { // Wait until the last block of epoch 1 is published and then hold off the sequencer. // Note that the tx below will block the sequencer until it times out // the txPropagationMaxQueryAttempts until #10824 is fixed. - await test.waitUntilL2BlockNumber(blockNumberAtEndOfEpoch0 + test.epochDuration); + await test.waitUntilL2BlockNumber( + blockNumberAtEndOfEpoch0 + test.epochDuration, + test.L2_SLOT_DURATION_IN_S * (test.epochDuration + 4), + ); sequencerDelayer.pauseNextTxUntilTimestamp(epoch2Start + BigInt(L1_BLOCK_TIME_IN_S)); // Next sequencer to publish a block should trigger a rollback to block 1 diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts index fc579dd51be1..f80057e182af 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts @@ -125,10 +125,10 @@ describe('e2e_p2p_network', () => { const dataStore = ((nodes[0] as AztecNodeService).getBlockSource() as Archiver).dataStore; const [block] = await dataStore.getPublishedBlocks(blockNumber, blockNumber); const payload = ConsensusPayload.fromBlock(block.block); - const attestations = block.signatures - .filter(s => !s.isEmpty) - .map(sig => new BlockAttestation(new Fr(blockNumber), payload, sig)); - const signers = attestations.map(att => att.getSender().toString()); + const attestations = block.attestations + .filter(a => !a.signature.isEmpty()) + .map(a => new BlockAttestation(new Fr(blockNumber), payload, a.signature)); + const signers = await 
Promise.all(attestations.map(att => att.getSender().toString())); t.logger.info(`Attestation signers`, { signers }); // Check that the signers found are part of the proposer nodes to ensure the archiver fetched them right diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network_no_cheat.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network_no_cheat.test.ts index 679bd0a789b7..ecfab3df3e41 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network_no_cheat.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network_no_cheat.test.ts @@ -196,10 +196,10 @@ describe('e2e_p2p_network', () => { const dataStore = ((nodes[0] as AztecNodeService).getBlockSource() as Archiver).dataStore; const [block] = await dataStore.getPublishedBlocks(blockNumber, blockNumber); const payload = ConsensusPayload.fromBlock(block.block); - const attestations = block.signatures - .filter(s => !s.isEmpty) - .map(sig => new BlockAttestation(new Fr(block.block.number), payload, sig)); - const signers = attestations.map(att => att.getSender().toString()); + const attestations = block.attestations + .filter(a => !a.signature.isEmpty()) + .map(a => new BlockAttestation(new Fr(blockNumber), payload, a.signature)); + const signers = await Promise.all(attestations.map(att => att.getSender().toString())); t.logger.info(`Attestation signers`, { signers }); // Check that the signers found are part of the proposer nodes to ensure the archiver fetched them right diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 55f9e24843f9..217ea6cdae8c 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -387,10 +387,13 @@ async function setupFromFresh( logger.info(`Funding rewardDistributor in ${rewardDistributorMintTxHash}`); } + const dateProvider = new TestDateProvider(); + const watcher = new AnvilTestWatcher( new 
EthCheatCodesWithState(aztecNodeConfig.l1RpcUrls), deployL1ContractsValues.l1ContractAddresses.rollupAddress, deployL1ContractsValues.l1Client, + dateProvider, ); await watcher.start(); @@ -423,7 +426,6 @@ async function setupFromFresh( await blobSink.start(); logger.verbose('Creating and synching an aztec node...'); - const dateProvider = new TestDateProvider(); const aztecNode = await AztecNodeService.createAndSync( aztecNodeConfig, { telemetry, dateProvider }, @@ -523,15 +525,16 @@ async function setupFromState(statePath: string, logger: Logger): Promise { const current = this.getEpochAndSlotNow(); const next = this.getEpochAndSlotInNextSlot(); return { - currentProposer: await this.getProposerAt(current), - nextProposer: await this.getProposerAt(next), currentSlot: current.slot, nextSlot: next.slot, + currentProposer: await this.getProposerAt(current), + nextProposer: await this.getProposerAt(next), }; } - private async getProposerAt(when: EpochAndSlot) { + getProposerInNextSlot(): Promise { + const epochAndSlot = this.getEpochAndSlotInNextSlot(); + + return this.getProposerAt(epochAndSlot); + } + + private async getProposerAt(when: EpochAndSlot): Promise { const { epoch, slot } = when; const { seed, committee } = await this.getCommittee(slot); + const proposerIndex = this.computeProposerIndex(slot, epoch, seed, BigInt(committee.length)); return committee[Number(proposerIndex)]; } diff --git a/yarn-project/ethereum/src/contracts/rollup.ts b/yarn-project/ethereum/src/contracts/rollup.ts index 4825fd23cd05..fea471d92990 100644 --- a/yarn-project/ethereum/src/contracts/rollup.ts +++ b/yarn-project/ethereum/src/contracts/rollup.ts @@ -17,6 +17,11 @@ import { formatViemError } from '../utils.js'; import { SlashingProposerContract } from './slashing_proposer.js'; import { checkBlockTag } from './utils.js'; +export type ViemCommitteeAttestation = { + addr: `0x${string}`; + signature: ViemSignature; +}; + export type L1RollupContractAddresses = Pick< 
L1ContractAddresses, | 'rollupAddress' @@ -321,7 +326,7 @@ export class RollupContract { public async validateHeader( args: readonly [ `0x${string}`, - ViemSignature[], + ViemCommitteeAttestation[], `0x${string}`, bigint, `0x${string}`, @@ -363,6 +368,7 @@ export class RollupContract { slotDuration = BigInt(slotDuration); } const timeOfNextL1Slot = (await this.client.getBlock()).timestamp + slotDuration; + try { const { result: [slot, blockNumber], diff --git a/yarn-project/ethereum/src/utils.ts b/yarn-project/ethereum/src/utils.ts index 74fa144d0c62..1f285f0273db 100644 --- a/yarn-project/ethereum/src/utils.ts +++ b/yarn-project/ethereum/src/utils.ts @@ -168,28 +168,6 @@ export function formatViemError(error: any, abi: Abi = ErrorsAbi): FormattedViem const errorClone = structuredClone(error); // Helper function to recursively remove ABI properties - const stripAbis = (obj: any) => { - if (!obj || typeof obj !== 'object') { - return; - } - - // Delete ABI property at current level - if ('abi' in obj) { - delete obj.abi; - } - - // Process cause property - if (obj.cause) { - stripAbis(obj.cause); - } - - // Process arrays and objects - Object.values(obj).forEach(value => { - if (value && typeof value === 'object') { - stripAbis(value); - } - }); - }; // Strip ABIs from the clone stripAbis(errorClone); @@ -203,156 +181,6 @@ export function formatViemError(error: any, abi: Abi = ErrorsAbi): FormattedViem return new FormattedViemError(error.message, (error as any)?.metaMessages); } - const truncateHex = (hex: string, length = 100) => { - if (!hex || typeof hex !== 'string') { - return hex; - } - if (!hex.startsWith('0x')) { - return hex; - } - if (hex.length <= length * 2) { - return hex; - } - // For extremely large hex strings, use more aggressive truncation - if (hex.length > 10000) { - return `${hex.slice(0, length)}...<${hex.length - length * 2} chars omitted>...${hex.slice(-length)}`; - } - return `${hex.slice(0, length)}...${hex.slice(-length)}`; - }; - - const 
replaceHexStrings = ( - text: string, - options: { - minLength?: number; - maxLength?: number; - truncateLength?: number; - pattern?: RegExp; - transform?: (hex: string) => string; - } = {}, - ): string => { - const { - minLength = 10, - maxLength = Infinity, - truncateLength = 100, - pattern, - transform = hex => truncateHex(hex, truncateLength), - } = options; - - const hexRegex = pattern ?? new RegExp(`(0x[a-fA-F0-9]{${minLength},${maxLength}})`, 'g'); - return text.replace(hexRegex, match => transform(match)); - }; - - const formatRequestBody = (body: string) => { - try { - // Special handling for eth_sendRawTransaction - if (body.includes('"method":"eth_sendRawTransaction"')) { - try { - const parsed = JSON.parse(body); - if (parsed.params && Array.isArray(parsed.params) && parsed.params.length > 0) { - // These are likely large transaction hex strings - parsed.params = parsed.params.map((param: any) => { - if (typeof param === 'string' && param.startsWith('0x') && param.length > 1000) { - return truncateHex(param, 200); - } - return param; - }); - } - return JSON.stringify(parsed, null, 2); - } catch { - // If specific parsing fails, fall back to regex-based truncation - return replaceHexStrings(body, { - pattern: /"params":\s*\[\s*"(0x[a-fA-F0-9]{1000,})"\s*\]/g, - transform: hex => `"params":["${truncateHex(hex, 200)}"]`, - }); - } - } - - // For extremely large request bodies, use simple truncation instead of parsing - if (body.length > 50000) { - const jsonStart = body.indexOf('{'); - const jsonEnd = body.lastIndexOf('}'); - if (jsonStart >= 0 && jsonEnd > jsonStart) { - return replaceHexStrings(body, { minLength: 10000, truncateLength: 200 }); - } - } - - const parsed = JSON.parse(body); - - // Recursively process all parameters that might contain hex strings - const processParams = (obj: any): any => { - if (Array.isArray(obj)) { - return obj.map(item => processParams(item)); - } - if (typeof obj === 'object' && obj !== null) { - const result: any = {}; 
- for (const [key, value] of Object.entries(obj)) { - result[key] = processParams(value); - } - return result; - } - if (typeof obj === 'string') { - if (obj.startsWith('0x')) { - return truncateHex(obj); - } - } - return obj; - }; - - // Process the entire request body - const processed = processParams(parsed); - return JSON.stringify(processed, null, 2); - } catch { - // If JSON parsing fails, do a simple truncation of any large hex strings - return replaceHexStrings(body, { minLength: 1000, truncateLength: 150 }); - } - }; - - const extractAndFormatRequestBody = (message: string): string => { - // First check if message is extremely large and contains very large hex strings - if (message.length > 50000) { - message = replaceHexStrings(message, { minLength: 10000, truncateLength: 200 }); - } - - // Add a specific check for RPC calls with large params - if (message.includes('"method":"eth_sendRawTransaction"')) { - message = replaceHexStrings(message, { - pattern: /"params":\s*\[\s*"(0x[a-fA-F0-9]{1000,})"\s*\]/g, - transform: hex => `"params":["${truncateHex(hex, 200)}"]`, - }); - } - - // First handle Request body JSON - const requestBodyRegex = /Request body: ({[\s\S]*?})\n/g; - let result = message.replace(requestBodyRegex, (match, body) => { - return `Request body: ${formatRequestBody(body)}\n`; - }); - - // Then handle Arguments section - const argsRegex = /((?:Request |Estimate Gas )?Arguments:[\s\S]*?(?=\n\n|$))/g; - result = result.replace(argsRegex, section => { - const lines = section.split('\n'); - const processedLines = lines.map(line => { - // Check if line contains a colon followed by content - const colonIndex = line.indexOf(':'); - if (colonIndex !== -1) { - const [prefix, content] = [line.slice(0, colonIndex + 1), line.slice(colonIndex + 1).trim()]; - // If content contains a hex string, truncate it - if (content.includes('0x')) { - const processedContent = replaceHexStrings(content); - return `${prefix} ${processedContent}`; - } - } - return 
line; - }); - return processedLines.join('\n'); - }); - - // Finally, catch any remaining hex strings in the message - result = replaceHexStrings(result); - - return result; - }; - // Extract the actual error message and highlight it for clarity let formattedRes = extractAndFormatRequestBody(error?.message || String(error)); @@ -390,6 +218,179 @@ export function formatViemError(error: any, abi: Abi = ErrorsAbi): FormattedViem return new FormattedViemError(formattedRes.replace(/\\n/g, '\n'), error?.metaMessages); } +function stripAbis(obj: any) { + if (!obj || typeof obj !== 'object') { + return; + } + + // Delete ABI property at current level + if ('abi' in obj) { + delete obj.abi; + } + + // Process cause property + if (obj.cause) { + stripAbis(obj.cause); + } + + // Process arrays and objects + Object.values(obj).forEach(value => { + if (value && typeof value === 'object') { + stripAbis(value); + } + }); +} + +function extractAndFormatRequestBody(message: string): string { + // First check if message is extremely large and contains very large hex strings + if (message.length > 50000) { + message = replaceHexStrings(message, { minLength: 10000, truncateLength: 200 }); + } + + // Add a specific check for RPC calls with large params + if (message.includes('"method":"eth_sendRawTransaction"')) { + message = replaceHexStrings(message, { + pattern: /"params":\s*\[\s*"(0x[a-fA-F0-9]{1000,})"\s*\]/g, + transform: hex => `"params":["${truncateHex(hex, 200)}"]`, + }); + } + + // First handle Request body JSON + const requestBodyRegex = /Request body: ({[\s\S]*?})\n/g; + let result = message.replace(requestBodyRegex, (match, body) => { + return `Request body: ${formatRequestBody(body)}\n`; + }); + + // Then handle Arguments section + const argsRegex = /((?:Request |Estimate Gas )?Arguments:[\s\S]*?(?=\n\n|$))/g; + result = result.replace(argsRegex, section => { + const lines = section.split('\n'); + const processedLines = lines.map(line => { + // Check if line contains a 
colon followed by content + const colonIndex = line.indexOf(':'); + if (colonIndex !== -1) { + const [prefix, content] = [line.slice(0, colonIndex + 1), line.slice(colonIndex + 1).trim()]; + // If content contains a hex string, truncate it + if (content.includes('0x')) { + const processedContent = replaceHexStrings(content); + return `${prefix} ${processedContent}`; + } + } + return line; + }); + return processedLines.join('\n'); + }); + + // Finally, catch any remaining hex strings in the message + result = replaceHexStrings(result); + + return result; +} + +function truncateHex(hex: string, length = 100) { + if (!hex || typeof hex !== 'string') { + return hex; + } + if (!hex.startsWith('0x')) { + return hex; + } + if (hex.length <= length * 2) { + return hex; + } + // For extremely large hex strings, use more aggressive truncation + if (hex.length > 10000) { + return `${hex.slice(0, length)}...<${hex.length - length * 2} chars omitted>...${hex.slice(-length)}`; + } + return `${hex.slice(0, length)}...${hex.slice(-length)}`; +} + +function replaceHexStrings( + text: string, + options: { + minLength?: number; + maxLength?: number; + truncateLength?: number; + pattern?: RegExp; + transform?: (hex: string) => string; + } = {}, +): string { + const { + minLength = 10, + maxLength = Infinity, + truncateLength = 100, + pattern, + transform = hex => truncateHex(hex, truncateLength), + } = options; + + const hexRegex = pattern ?? 
new RegExp(`(0x[a-fA-F0-9]{${minLength},${maxLength}})`, 'g'); + return text.replace(hexRegex, match => transform(match)); +} + +function formatRequestBody(body: string) { + try { + // Special handling for eth_sendRawTransaction + if (body.includes('"method":"eth_sendRawTransaction"')) { + try { + const parsed = JSON.parse(body); + if (parsed.params && Array.isArray(parsed.params) && parsed.params.length > 0) { + // These are likely large transaction hex strings + parsed.params = parsed.params.map((param: any) => { + if (typeof param === 'string' && param.startsWith('0x') && param.length > 1000) { + return truncateHex(param, 200); + } + return param; + }); + } + return JSON.stringify(parsed, null, 2); + } catch { + // If specific parsing fails, fall back to regex-based truncation + return replaceHexStrings(body, { + pattern: /"params":\s*\[\s*"(0x[a-fA-F0-9]{1000,})"\s*\]/g, + transform: hex => `"params":["${truncateHex(hex, 200)}"]`, + }); + } + } + + // For extremely large request bodies, use simple truncation instead of parsing + if (body.length > 50000) { + const jsonStart = body.indexOf('{'); + const jsonEnd = body.lastIndexOf('}'); + if (jsonStart >= 0 && jsonEnd > jsonStart) { + return replaceHexStrings(body, { minLength: 10000, truncateLength: 200 }); + } + } + + const parsed = JSON.parse(body); + + // Process the entire request body + const processed = processParams(parsed); + return JSON.stringify(processed, null, 2); + } catch { + // If JSON parsing fails, do a simple truncation of any large hex strings + return replaceHexStrings(body, { minLength: 1000, truncateLength: 150 }); + } +} + +// Recursively process all parameters that might contain hex strings +function processParams(obj: any): any { + if (Array.isArray(obj)) { + return obj.map(item => processParams(item)); + } + if (typeof obj === 'object' && obj !== null) { + const result: any = {}; + for (const [key, value] of Object.entries(obj)) { + result[key] = processParams(value); + } + return 
result; + } + if (typeof obj === 'string') { + if (obj.startsWith('0x')) { + return truncateHex(obj); + } + } + return obj; +} + export function tryGetCustomErrorName(err: any) { try { // See https://viem.sh/docs/contract/simulateContract#handling-custom-errors diff --git a/yarn-project/foundation/src/eth-signature/eth_signature.test.ts b/yarn-project/foundation/src/eth-signature/eth_signature.test.ts index ec76be5b6fa0..b8907d20e517 100644 --- a/yarn-project/foundation/src/eth-signature/eth_signature.test.ts +++ b/yarn-project/foundation/src/eth-signature/eth_signature.test.ts @@ -43,21 +43,21 @@ describe('eth signature', () => { }); it('should serialize and deserialize to hex string with v=0', () => { - const signature = new Signature(Buffer32.random(), Buffer32.random(), 0, false); + const signature = new Signature(Buffer32.random(), Buffer32.random(), 0); const serialized = signature.toString(); const deserialized = Signature.fromString(serialized); checkEquivalence(signature, deserialized); }); it('should serialize and deserialize to hex string with 1-digit v', () => { - const signature = new Signature(Buffer32.random(), Buffer32.random(), 1, false); + const signature = new Signature(Buffer32.random(), Buffer32.random(), 1); const serialized = signature.toString(); const deserialized = Signature.fromString(serialized); checkEquivalence(signature, deserialized); }); it('should serialize and deserialize to hex string with 2-digit v', () => { - const signature = new Signature(Buffer32.random(), Buffer32.random(), 26, false); + const signature = new Signature(Buffer32.random(), Buffer32.random(), 26); const serialized = signature.toString(); const deserialized = Signature.fromString(serialized); checkEquivalence(signature, deserialized); diff --git a/yarn-project/foundation/src/eth-signature/eth_signature.ts b/yarn-project/foundation/src/eth-signature/eth_signature.ts index 109b7320d044..d4dfbe53f9e1 100644 --- 
a/yarn-project/foundation/src/eth-signature/eth_signature.ts +++ b/yarn-project/foundation/src/eth-signature/eth_signature.ts @@ -12,7 +12,6 @@ export type ViemSignature = { r: `0x${string}`; s: `0x${string}`; v: number; - isEmpty: boolean; }; /** @@ -22,6 +21,8 @@ export class Signature { // Cached values private size: number | undefined; + public readonly empty: boolean; + constructor( /** The r value of the signature */ public readonly r: Buffer32, @@ -29,9 +30,9 @@ export class Signature { public readonly s: Buffer32, /** The v value of the signature */ public readonly v: number, - /** Does this struct store an empty signature */ - public readonly isEmpty: boolean = false, - ) {} + ) { + this.empty = r.isZero() && s.isZero() && v === 0; + } static fromBuffer(buf: Buffer | BufferReader): Signature { const reader = BufferReader.asReader(buf); @@ -40,9 +41,7 @@ export class Signature { const s = reader.readObject(Buffer32); const v = reader.readNumber(); - const isEmpty = r.isZero() && s.isZero(); - - return new Signature(r, s, v, isEmpty); + return new Signature(r, s, v); } static isValidString(sig: `0x${string}`): boolean { @@ -61,30 +60,27 @@ export class Signature { const s = reader.readObject(Buffer32); const v = parseInt(sig.slice(2 + 64 * 2), 16); - const isEmpty = r.isZero() && s.isZero(); - - return new Signature(r, s, v, isEmpty); + return new Signature(r, s, v); } static fromViemSignature(sig: ViemSignature): Signature { - return new Signature( - Buffer32.fromBuffer(hexToBuffer(sig.r)), - Buffer32.fromBuffer(hexToBuffer(sig.s)), - sig.v, - sig.isEmpty, - ); + return new Signature(Buffer32.fromBuffer(hexToBuffer(sig.r)), Buffer32.fromBuffer(hexToBuffer(sig.s)), sig.v); } static random(): Signature { - return new Signature(Buffer32.random(), Buffer32.random(), Math.floor(Math.random() * 2), false); + return new Signature(Buffer32.random(), Buffer32.random(), Math.floor(Math.random() * 2)); } static empty(): Signature { - return new 
Signature(Buffer32.ZERO, Buffer32.ZERO, 0, true); + return new Signature(Buffer32.ZERO, Buffer32.ZERO, 0); + } + + isEmpty(): boolean { + return this.empty; } equals(other: Signature): boolean { - return this.r.equals(other.r) && this.s.equals(other.s) && this.v === other.v && this.isEmpty === other.isEmpty; + return this.r.equals(other.r) && this.s.equals(other.s) && this.v === other.v && this.empty === other.empty; } toBuffer(): Buffer { @@ -115,7 +111,6 @@ export class Signature { r: this.r.toString(), s: this.s.toString(), v: this.v, - isEmpty: this.isEmpty, }; } diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts index 87405762212f..836213f90fe7 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts @@ -113,6 +113,7 @@ describe('SequencerPublisher', () => { const epochCache = mock(); epochCache.getEpochAndSlotNow.mockReturnValue({ epoch: 1n, slot: 2n, ts: 3n }); + epochCache.getCommittee.mockResolvedValue({ committee: [], seed: 1n, epoch: 1n }); publisher = new SequencerPublisher(config, { blobSinkClient, @@ -311,10 +312,6 @@ describe('SequencerPublisher', () => { }); it('does not send requests if no valid requests are found', async () => { - const epochCache = (publisher as any).epochCache as MockProxy; - - epochCache.getEpochAndSlotNow.mockReturnValue({ epoch: 1n, slot: 2n, ts: 3n }); - publisher.addRequest({ action: 'propose', request: { diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts index c25b5e334766..75673256f59b 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts @@ -20,10 +20,10 @@ import { import type { L1TxUtilsWithBlobs } from 
'@aztec/ethereum/l1-tx-utils-with-blobs'; import { toHex as toPaddedHex } from '@aztec/foundation/bigint-buffer'; import { EthAddress } from '@aztec/foundation/eth-address'; -import type { Signature } from '@aztec/foundation/eth-signature'; import { createLogger } from '@aztec/foundation/log'; import { Timer } from '@aztec/foundation/timer'; import { ForwarderAbi, RollupAbi } from '@aztec/l1-artifacts'; +import { CommitteeAttestation } from '@aztec/stdlib/block'; import { ConsensusPayload, SignatureDomainSeparator, getHashedSignaturePayload } from '@aztec/stdlib/p2p'; import type { L1PublishBlockStats } from '@aztec/stdlib/stats'; import { type ProposedBlockHeader, TxHash } from '@aztec/stdlib/tx'; @@ -48,7 +48,7 @@ type L1ProcessArgs = { /** L2 block tx hashes */ txHashes: TxHash[]; /** Attestations */ - attestations?: Signature[]; + attestations?: CommitteeAttestation[]; }; export enum VoteType { @@ -74,7 +74,7 @@ interface RequestWithExpiry { export class SequencerPublisher { private interrupted = false; private metrics: SequencerPublisherMetrics; - private epochCache: EpochCache; + public epochCache: EpochCache; private forwarderContract: ForwarderContract; protected governanceLog = createLogger('sequencer:publisher:governance'); @@ -265,6 +265,7 @@ export class SequencerPublisher { */ public canProposeAtNextEthBlock(tipArchive: Buffer) { const ignoredErrors = ['SlotAlreadyInChain', 'InvalidProposer', 'InvalidArchive']; + return this.rollupContract .canProposeAtNextEthBlock(tipArchive, this.getForwarderAddress().toString(), this.ethereumSlotDuration) .catch(err => { @@ -288,19 +289,29 @@ export class SequencerPublisher { */ public async validateBlockForSubmission( header: ProposedBlockHeader, - attestationData: { digest: Buffer; signatures: Signature[] } = { + attestationData: { digest: Buffer; attestations: CommitteeAttestation[] } = { digest: Buffer.alloc(32), - signatures: [], + attestations: [], }, ): Promise { const ts = BigInt((await 
this.l1TxUtils.getBlock()).timestamp + this.ethereumSlotDuration); - const formattedSignatures = attestationData.signatures.map(attest => attest.toViemSignature()); - const flags = { ignoreDA: true, ignoreSignatures: formattedSignatures.length == 0 }; + // If we have no attestations, we still need to provide the empty attestations + // so that the committee is recalculated correctly + const ignoreSignatures = attestationData.attestations.length === 0; + if (ignoreSignatures) { + const committee = await this.epochCache.getCommittee(header.slotNumber.toBigInt()); + attestationData.attestations = committee.committee.map(committeeMember => + CommitteeAttestation.fromAddress(committeeMember), + ); + } + + const formattedAttestations = attestationData.attestations.map(attest => attest.toViem()); + const flags = { ignoreDA: true, ignoreSignatures }; const args = [ toHex(header.toBuffer()), - formattedSignatures, + formattedAttestations, toHex(attestationData.digest), ts, toHex(header.contentCommitment.blobsHash), @@ -405,7 +416,7 @@ export class SequencerPublisher { */ public async enqueueProposeL2Block( block: L2Block, - attestations?: Signature[], + attestations?: CommitteeAttestation[], txHashes?: TxHash[], opts: { txTimeoutAt?: Date } = {}, ): Promise { @@ -431,7 +442,7 @@ export class SequencerPublisher { // make time consistency checks break. const ts = await this.validateBlockForSubmission(proposedBlockHeader, { digest: digest.toBuffer(), - signatures: attestations ?? [], + attestations: attestations ?? [], }); this.log.debug(`Submitting propose transaction`); @@ -486,9 +497,7 @@ export class SequencerPublisher { throw new Error('Failed to validate blobs'); }); - const attestations = encodedData.attestations - ? encodedData.attestations.map(attest => attest.toViemSignature()) - : []; + const attestations = encodedData.attestations ? encodedData.attestations.map(attest => attest.toViem()) : []; const txHashes = encodedData.txHashes ? 
encodedData.txHashes.map(txHash => txHash.toString()) : []; const args = [ { diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 6152d164ec34..9c37780d71b1 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -1,8 +1,9 @@ import { Body, L2Block } from '@aztec/aztec.js'; import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants'; +import type { EpochCache } from '@aztec/epoch-cache'; import { DefaultL1ContractsConfig } from '@aztec/ethereum'; -import { Buffer32 } from '@aztec/foundation/buffer'; import { times, timesParallel } from '@aztec/foundation/collection'; +import { Secp256k1Signer } from '@aztec/foundation/crypto'; import { EthAddress } from '@aztec/foundation/eth-address'; import { Signature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; @@ -15,7 +16,7 @@ import type { PublicProcessor, PublicProcessorFactory } from '@aztec/simulator/s import type { SlasherClient } from '@aztec/slasher'; import { PublicDataWrite } from '@aztec/stdlib/avm'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import type { L2BlockSource } from '@aztec/stdlib/block'; +import { CommitteeAttestation, type L2BlockSource } from '@aztec/stdlib/block'; import type { ContractDataSource } from '@aztec/stdlib/contract'; import { Gas, GasFees } from '@aztec/stdlib/gas'; import { @@ -35,7 +36,7 @@ import { BlockHeader, GlobalVariables, type Tx, TxHash, makeProcessedTxFromPriva import type { ValidatorClient } from '@aztec/validator-client'; import { expect } from '@jest/globals'; -import { type MockProxy, mock, mockFn } from 'jest-mock-extended'; +import { type MockProxy, mock, mockDeep, mockFn } from 'jest-mock-extended'; import type { GlobalVariableBuilder } from '../global_variable_builder/global_builder.js'; import type { SequencerPublisher 
} from '../publisher/sequencer-publisher.js'; @@ -76,10 +77,12 @@ describe('sequencer', () => { let feeRecipient: AztecAddress; const gasFees = GasFees.empty(); - const mockedSig = new Signature(Buffer32.fromField(Fr.random()), Buffer32.fromField(Fr.random()), 27); - const committee = [EthAddress.random()]; + const signer = Secp256k1Signer.random(); + const mockedSig = Signature.random(); + const mockedAttestation = new CommitteeAttestation(signer.address, mockedSig); + const committee = [signer.address]; - const getSignatures = () => [mockedSig]; + const getSignatures = () => [mockedAttestation]; const getAttestations = () => { const attestation = new BlockAttestation( @@ -159,7 +162,11 @@ describe('sequencer', () => { gasFees, ); - publisher = mock(); + const epochCache = mockDeep(); + epochCache.getEpochAndSlotInNextSlot.mockImplementation(() => ({ epoch: 1n, slot: 1n, ts: 1n })); + + publisher = mockDeep(); + publisher.epochCache = epochCache; publisher.getSenderAddress.mockImplementation(() => EthAddress.random()); publisher.getForwarderAddress.mockImplementation(() => EthAddress.random()); publisher.getCurrentEpochCommittee.mockResolvedValue(committee); @@ -538,9 +545,6 @@ describe('sequencer', () => { publisher.canProposeAtNextEthBlock.mockResolvedValueOnce(undefined); await sequencer.doRealWork(); expect(publisher.enqueueProposeL2Block).not.toHaveBeenCalled(); - // even though the chain tip moved, the sequencer should still have tried to build a block against the old archive - // this should get caught by the rollup - expect(publisher.canProposeAtNextEthBlock).toHaveBeenCalledWith(currentTip.archive.root.toBuffer()); }); it('aborts building a block if the chain moves underneath it', async () => { diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index d4b1ba6ca605..057f529dd5f3 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ 
b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -1,9 +1,8 @@ import { type L2Block, retryUntil } from '@aztec/aztec.js'; import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; -import type { ViemPublicClient } from '@aztec/ethereum'; +import { FormattedViemError, type ViemPublicClient } from '@aztec/ethereum'; import { omit } from '@aztec/foundation/collection'; import { EthAddress } from '@aztec/foundation/eth-address'; -import type { Signature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; import { createLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; @@ -14,7 +13,7 @@ import type { BlockBuilderFactory } from '@aztec/prover-client/block-builder'; import type { PublicProcessorFactory } from '@aztec/simulator/server'; import type { SlasherClient } from '@aztec/slasher'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import type { L2BlockSource } from '@aztec/stdlib/block'; +import type { CommitteeAttestation, L2BlockSource } from '@aztec/stdlib/block'; import type { ContractDataSource } from '@aztec/stdlib/contract'; import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; import { Gas } from '@aztec/stdlib/gas'; @@ -271,10 +270,18 @@ export class Sequencer { // If we cannot find a tip archive, assume genesis. 
const chainTipArchive = chainTip.archive; - const slot = await this.slotForProposal(chainTipArchive.toBuffer(), BigInt(newBlockNumber)); + const { slot } = this.publisher.epochCache.getEpochAndSlotInNextSlot(); this.metrics.observeSlotChange(slot, this.publisher.getSenderAddress().toString()); - if (!slot) { - this.log.debug(`Cannot propose block ${newBlockNumber}`); + + const proposerInNextSlot = await this.publisher.epochCache.getProposerInNextSlot(); + + // If get proposer in next slot is undefined, then there is no proposer set, and it is in free for all (sandbox) so we continue + // If we calculate a proposer in the next slot, and it is not us, then stop + if (proposerInNextSlot !== undefined && !proposerInNextSlot.equals(this.validatorClient!.getValidatorAddress())) { + this.log.debug(`Cannot propose block ${newBlockNumber}`, { + us: this.validatorClient!.getValidatorAddress(), + proposer: proposerInNextSlot, + }); return; } @@ -322,7 +329,12 @@ export class Sequencer { const pendingTxs = this.p2pClient.iteratePendingTxs(); await this.buildBlockAndEnqueuePublish(pendingTxs, proposalHeader, newGlobalVariables).catch(err => { - this.log.error(`Error building/enqueuing block`, err, { blockNumber: newBlockNumber, slot }); + if (err instanceof FormattedViemError) { + this.log.verbose(`Unable to build/enqueue block ${err.message}`); + return; + } else { + this.log.error(`Error building/enqueuing block`, err, { blockNumber: newBlockNumber, slot }); + } }); finishedFlushing = true; } else { @@ -374,29 +386,6 @@ export class Sequencer { return this.publisher.getForwarderAddress(); } - /** - * Checks if we can propose at the next block and returns the slot number if we can. - * @param tipArchive - The archive of the previous block. - * @param proposalBlockNumber - The block number of the proposal. - * @returns The slot number if we can propose at the next block, otherwise undefined. 
- */ - async slotForProposal(tipArchive: Buffer, proposalBlockNumber: bigint): Promise { - const result = await this.publisher.canProposeAtNextEthBlock(tipArchive); - - if (!result) { - return undefined; - } - - const [slot, blockNumber] = result; - - if (proposalBlockNumber !== blockNumber) { - const msg = `Sequencer block number mismatch. Expected ${proposalBlockNumber} but got ${blockNumber}.`; - this.log.warn(msg); - throw new Error(msg); - } - return slot; - } - /** * Sets the sequencer state and checks if we have enough time left in the slot to transition to the new state. * @param proposedState - The new state to transition to. @@ -663,7 +652,7 @@ export class Sequencer { [Attributes.BLOCK_ARCHIVE]: block.archive.toString(), [Attributes.BLOCK_TXS_COUNT]: txHashes.length, })) - protected async collectAttestations(block: L2Block, txs: Tx[]): Promise { + protected async collectAttestations(block: L2Block, txs: Tx[]): Promise { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7962): inefficient to have a round trip in here - this should be cached const committee = await this.publisher.getCurrentEpochCommittee(); @@ -725,7 +714,7 @@ export class Sequencer { })) protected async enqueuePublishL2Block( block: L2Block, - attestations?: Signature[], + attestations?: CommitteeAttestation[], txHashes?: TxHash[], ): Promise { // Publishes new block to the network and awaits the tx to be mined diff --git a/yarn-project/sequencer-client/src/sequencer/utils.ts b/yarn-project/sequencer-client/src/sequencer/utils.ts index 7f98c6fa165f..a9cc64bb2d15 100644 --- a/yarn-project/sequencer-client/src/sequencer/utils.ts +++ b/yarn-project/sequencer-client/src/sequencer/utils.ts @@ -1,5 +1,5 @@ import type { EthAddress } from '@aztec/foundation/eth-address'; -import { Signature } from '@aztec/foundation/eth-signature'; +import { CommitteeAttestation } from '@aztec/stdlib/block'; import type { BlockAttestation } from '@aztec/stdlib/p2p'; export enum SequencerState { @@ 
-50,19 +50,27 @@ export function sequencerStateToNumber(state: SequencerState): number { * * @todo: perform this logic within the memory attestation store instead? */ -export function orderAttestations(attestations: BlockAttestation[], orderAddresses: EthAddress[]): Signature[] { +export function orderAttestations( + attestations: BlockAttestation[], + orderAddresses: EthAddress[], +): CommitteeAttestation[] { // Create a map of sender addresses to BlockAttestations - const attestationMap = new Map(); + const attestationMap = new Map(); for (const attestation of attestations) { const sender = attestation.getSender(); - attestationMap.set(sender.toString(), attestation); + if (sender) { + attestationMap.set( + sender.toString(), + CommitteeAttestation.fromAddressAndSignature(sender, attestation.signature), + ); + } } - // Create the ordered array based on the orderAddresses, else return an empty signature + // Create the ordered array based on the orderAddresses, else return an empty attestation const orderedAttestations = orderAddresses.map(address => { const addressString = address.toString(); - return attestationMap.get(addressString)?.signature || Signature.empty(); + return attestationMap.get(addressString) || CommitteeAttestation.fromAddress(address); }); return orderedAttestations; diff --git a/yarn-project/stdlib/src/block/index.ts b/yarn-project/stdlib/src/block/index.ts index bc324d9cbd36..568536c2ef25 100644 --- a/yarn-project/stdlib/src/block/index.ts +++ b/yarn-project/stdlib/src/block/index.ts @@ -6,3 +6,4 @@ export * from './l2_block_number.js'; export * from './l2_block_source.js'; export * from './block_hash.js'; export * from './published_l2_block.js'; +export * from './proposal/index.js'; diff --git a/yarn-project/stdlib/src/block/proposal/committee_attestation.ts b/yarn-project/stdlib/src/block/proposal/committee_attestation.ts new file mode 100644 index 000000000000..ca899920d5ab --- /dev/null +++ 
b/yarn-project/stdlib/src/block/proposal/committee_attestation.ts @@ -0,0 +1,70 @@ +import type { ViemCommitteeAttestation } from '@aztec/ethereum'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { Signature } from '@aztec/foundation/eth-signature'; +import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; + +import { z } from 'zod'; + +export { Signature }; +export { EthAddress }; + +export class CommitteeAttestation { + constructor( + public readonly address: EthAddress, + public readonly signature: Signature, + ) {} + + static get schema() { + return z + .object({ + address: EthAddress.schema, + signature: Signature.schema, + }) + .transform(({ address, signature }) => new CommitteeAttestation(address, signature)); + } + + // Create an empty attestation for an address that has not signed + static fromAddress(address: EthAddress): CommitteeAttestation { + return new CommitteeAttestation(address, Signature.empty()); + } + + // Create an attestation from an address and a signature + static fromAddressAndSignature(address: EthAddress, signature: Signature): CommitteeAttestation { + return new CommitteeAttestation(address, signature); + } + + static fromViem(viem: ViemCommitteeAttestation): CommitteeAttestation { + return new CommitteeAttestation(EthAddress.fromString(viem.addr), Signature.fromViemSignature(viem.signature)); + } + + static fromBuffer(buffer: Buffer): CommitteeAttestation { + const reader = BufferReader.asReader(buffer); + const address = reader.readObject(EthAddress); + const signature = reader.readObject(Signature); + return new CommitteeAttestation(address, signature); + } + + static random(): CommitteeAttestation { + // note: will be invalid + return new CommitteeAttestation(EthAddress.random(), Signature.random()); + } + + static empty(): CommitteeAttestation { + return new CommitteeAttestation(EthAddress.ZERO, Signature.empty()); + } + + toBuffer(): Buffer { + return serializeToBuffer([this.address, 
this.signature]); + } + + equals(other: CommitteeAttestation): boolean { + return this.address.equals(other.address) && this.signature.equals(other.signature); + } + + toViem(): ViemCommitteeAttestation { + return { + addr: this.address.toString(), + signature: this.signature.toViemSignature(), + }; + } +} diff --git a/yarn-project/stdlib/src/block/proposal/index.ts b/yarn-project/stdlib/src/block/proposal/index.ts new file mode 100644 index 000000000000..bc2a01fdd294 --- /dev/null +++ b/yarn-project/stdlib/src/block/proposal/index.ts @@ -0,0 +1 @@ +export * from './committee_attestation.js'; diff --git a/yarn-project/stdlib/src/block/published_l2_block.test.ts b/yarn-project/stdlib/src/block/published_l2_block.test.ts index 03f18a273245..f0451f14fdb1 100644 --- a/yarn-project/stdlib/src/block/published_l2_block.test.ts +++ b/yarn-project/stdlib/src/block/published_l2_block.test.ts @@ -1,17 +1,17 @@ -import { Signature } from '@aztec/foundation/eth-signature'; import { jsonStringify } from '@aztec/foundation/json-rpc'; import { L2Block } from './l2_block.js'; -import { PublishedL2BlockSchema } from './published_l2_block.js'; +import { CommitteeAttestation } from './proposal/committee_attestation.js'; +import { PublishedL2Block } from './published_l2_block.js'; describe('PublishedL2Block', () => { it('convert to and from json', async () => { const block = { block: await L2Block.random(1), - signatures: [Signature.random()], + attestations: [CommitteeAttestation.random()], l1: { blockHash: `0x`, blockNumber: 1n, timestamp: 0n }, }; - const parsed = PublishedL2BlockSchema.parse(JSON.parse(jsonStringify(block))); + const parsed = PublishedL2Block.schema.parse(JSON.parse(jsonStringify(block))); expect(parsed).toEqual(block); }); }); diff --git a/yarn-project/stdlib/src/block/published_l2_block.ts b/yarn-project/stdlib/src/block/published_l2_block.ts index 5c711bc6ab3f..41785fdd1167 100644 --- a/yarn-project/stdlib/src/block/published_l2_block.ts +++ 
b/yarn-project/stdlib/src/block/published_l2_block.ts @@ -1,37 +1,51 @@ -import { Signature } from '@aztec/foundation/eth-signature'; +// Ignoring import issue to fix portable inferred type issue in zod schema import { schemas } from '@aztec/foundation/schemas'; -import { L2Block } from '@aztec/stdlib/block'; import { z } from 'zod'; import { BlockAttestation } from '../p2p/block_attestation.js'; import { ConsensusPayload } from '../p2p/consensus_payload.js'; +import { L2Block } from './l2_block.js'; +import { CommitteeAttestation } from './proposal/committee_attestation.js'; -export type L1PublishedData = { - blockNumber: bigint; - timestamp: bigint; - blockHash: string; -}; +export class L1PublishedData { + constructor( + public blockNumber: bigint, + public timestamp: bigint, + public blockHash: string, + ) {} -export type PublishedL2Block = { - block: L2Block; - l1: L1PublishedData; - signatures: Signature[]; -}; + static get schema() { + return z.object({ + blockNumber: schemas.BigInt, + timestamp: schemas.BigInt, + blockHash: z.string(), + }); + } +} + +export class PublishedL2Block { + constructor( + public block: L2Block, + public l1: L1PublishedData, + public attestations: CommitteeAttestation[], + ) {} -export const PublishedL2BlockSchema = z.object({ - block: L2Block.schema, - l1: z.object({ - blockNumber: schemas.BigInt, - timestamp: schemas.BigInt, - blockHash: z.string(), - }), - signatures: z.array(Signature.schema), -}); + static get schema() { + return z.object({ + block: L2Block.schema, + l1: L1PublishedData.schema, + attestations: z.array(CommitteeAttestation.schema), + }); + } +} export function getAttestationsFromPublishedL2Block(block: PublishedL2Block) { const payload = ConsensusPayload.fromBlock(block.block); - return block.signatures - .filter(sig => !sig.isEmpty) - .map(signature => new BlockAttestation(block.block.header.globalVariables.blockNumber, payload, signature)); + return block.attestations + .filter(attestation => 
!attestation.signature.isEmpty()) + .map( + attestation => + new BlockAttestation(block.block.header.globalVariables.blockNumber, payload, attestation.signature), + ); } diff --git a/yarn-project/stdlib/src/block/test/l2_tips_store_test_suite.ts b/yarn-project/stdlib/src/block/test/l2_tips_store_test_suite.ts index 6ad51d908c0b..f12c6a6ac65a 100644 --- a/yarn-project/stdlib/src/block/test/l2_tips_store_test_suite.ts +++ b/yarn-project/stdlib/src/block/test/l2_tips_store_test_suite.ts @@ -17,7 +17,7 @@ export function testL2TipsStore(makeTipsStore: () => Promise) { const makeBlock = (number: number): PublishedL2Block => ({ block: { number, header: { hash: () => Promise.resolve(new Fr(number)) } as BlockHeader } as L2Block, l1: { blockNumber: BigInt(number), blockHash: `0x${number}`, timestamp: BigInt(number) }, - signatures: [], + attestations: [], }); const makeBlockId = (number: number): L2BlockId => ({ diff --git a/yarn-project/stdlib/src/interfaces/archiver.test.ts b/yarn-project/stdlib/src/interfaces/archiver.test.ts index 88fa584a3b99..874c04a30e3e 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.test.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.test.ts @@ -1,6 +1,5 @@ import { randomInt } from '@aztec/foundation/crypto'; import { EthAddress } from '@aztec/foundation/eth-address'; -import { Signature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; import { type JsonRpcTestContext, createJsonRpcTestSetup } from '@aztec/foundation/json-rpc/test'; @@ -9,6 +8,7 @@ import omit from 'lodash.omit'; import type { ContractArtifact } from '../abi/abi.js'; import { FunctionSelector } from '../abi/function_selector.js'; import { AztecAddress } from '../aztec-address/index.js'; +import { CommitteeAttestation } from '../block/index.js'; import { L2Block } from '../block/l2_block.js'; import type { L2Tips } from '../block/l2_block_source.js'; import type { PublishedL2Block } from '../block/published_l2_block.js'; @@ 
-99,7 +99,7 @@ describe('ArchiverApiSchema', () => { const response = await context.client.getPublishedBlocks(1, 1); expect(response).toHaveLength(1); expect(response[0].block.constructor.name).toEqual('L2Block'); - expect(response[0].signatures[0]).toBeInstanceOf(Signature); + expect(response[0].attestations[0]).toBeInstanceOf(CommitteeAttestation); expect(response[0].l1).toBeDefined(); }); @@ -273,7 +273,7 @@ class MockArchiver implements ArchiverApi { return [ { block: await L2Block.random(from), - signatures: [Signature.random()], + attestations: [CommitteeAttestation.random()], l1: { blockHash: `0x`, blockNumber: 1n, timestamp: 0n }, }, ]; diff --git a/yarn-project/stdlib/src/interfaces/archiver.ts b/yarn-project/stdlib/src/interfaces/archiver.ts index 5854e88fe1ed..f3b14086d03f 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.ts @@ -4,7 +4,7 @@ import { z } from 'zod'; import { L2Block } from '../block/l2_block.js'; import { type L2BlockSource, L2TipsSchema } from '../block/l2_block_source.js'; -import { PublishedL2BlockSchema } from '../block/published_l2_block.js'; +import { PublishedL2Block } from '../block/published_l2_block.js'; import { ContractClassPublicSchema, type ContractDataSource, @@ -45,7 +45,7 @@ export const ArchiverApiSchema: ApiSchemaFor = { getPublishedBlocks: z .function() .args(schemas.Integer, schemas.Integer, optional(z.boolean())) - .returns(z.array(PublishedL2BlockSchema)), + .returns(z.array(PublishedL2Block.schema)), getTxEffect: z.function().args(TxHash.schema).returns(indexedTxSchema().optional()), getSettledTxReceipt: z.function().args(TxHash.schema).returns(TxReceipt.schema.optional()), getL2SlotNumber: z.function().args().returns(schemas.BigInt), diff --git a/yarn-project/stdlib/src/interfaces/aztec-node.test.ts b/yarn-project/stdlib/src/interfaces/aztec-node.test.ts index ba93c1ac9228..c7b0f7b3d6c5 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node.test.ts 
+++ b/yarn-project/stdlib/src/interfaces/aztec-node.test.ts @@ -10,7 +10,6 @@ import { Buffer32 } from '@aztec/foundation/buffer'; import { randomInt } from '@aztec/foundation/crypto'; import { memoize } from '@aztec/foundation/decorators'; import { EthAddress } from '@aztec/foundation/eth-address'; -import { Signature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; import { type JsonRpcTestContext, createJsonRpcTestSetup } from '@aztec/foundation/json-rpc/test'; import { SiblingPath } from '@aztec/foundation/trees'; @@ -20,6 +19,7 @@ import omit from 'lodash.omit'; import type { ContractArtifact } from '../abi/abi.js'; import { AztecAddress } from '../aztec-address/index.js'; import type { InBlock } from '../block/in_block.js'; +import { CommitteeAttestation } from '../block/index.js'; import { L2Block } from '../block/l2_block.js'; import type { L2Tips } from '../block/l2_block_source.js'; import type { PublishedL2Block } from '../block/published_l2_block.js'; @@ -195,7 +195,7 @@ describe('AztecNodeApiSchema', () => { const response = await context.client.getPublishedBlocks(1, 1); expect(response).toHaveLength(1); expect(response[0].block.constructor.name).toEqual('L2Block'); - expect(response[0].signatures[0]).toBeInstanceOf(Signature); + expect(response[0].attestations[0]).toBeInstanceOf(CommitteeAttestation); expect(response[0].l1).toBeDefined(); }); @@ -536,7 +536,7 @@ class MockAztecNode implements AztecNode { .fill(0) .map(async i => ({ block: await L2Block.random(from + i), - signatures: [Signature.random()], + attestations: [CommitteeAttestation.random()], l1: { blockHash: Buffer32.random().toString(), blockNumber: 1n, timestamp: 1n }, })), ); diff --git a/yarn-project/stdlib/src/interfaces/aztec-node.ts b/yarn-project/stdlib/src/interfaces/aztec-node.ts index f93404ab081f..eaaa1af90ba7 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node.ts @@ -17,7 
+17,7 @@ import { type InBlock, inBlockSchemaFor } from '../block/in_block.js'; import { L2Block } from '../block/l2_block.js'; import { type L2BlockNumber, L2BlockNumberSchema } from '../block/l2_block_number.js'; import { type L2BlockSource, type L2Tips, L2TipsSchema } from '../block/l2_block_source.js'; -import { PublishedL2BlockSchema } from '../block/published_l2_block.js'; +import { PublishedL2Block } from '../block/published_l2_block.js'; import { type ContractClassPublic, ContractClassPublicSchema, @@ -484,7 +484,7 @@ export const AztecNodeApiSchema: ApiSchemaFor = { getBlocks: z.function().args(z.number(), z.number()).returns(z.array(L2Block.schema)), - getPublishedBlocks: z.function().args(z.number(), z.number()).returns(z.array(PublishedL2BlockSchema)), + getPublishedBlocks: z.function().args(z.number(), z.number()).returns(z.array(PublishedL2Block.schema)), getCurrentBaseFees: z.function().returns(GasFees.schema), diff --git a/yarn-project/stdlib/src/tests/mocks.ts b/yarn-project/stdlib/src/tests/mocks.ts index 528181a4912a..1ed57a28293d 100644 --- a/yarn-project/stdlib/src/tests/mocks.ts +++ b/yarn-project/stdlib/src/tests/mocks.ts @@ -6,6 +6,7 @@ import { Fr } from '@aztec/foundation/fields'; import type { ContractArtifact } from '../abi/abi.js'; import { AztecAddress } from '../aztec-address/index.js'; +import { CommitteeAttestation } from '../block/index.js'; import { L2Block } from '../block/l2_block.js'; import type { PublishedL2Block } from '../block/published_l2_block.js'; import { computeContractAddressFromInstance } from '../contract/contract_address.js'; @@ -299,7 +300,7 @@ export async function randomPublishedL2Block( }; const signers = opts.signers ?? 
times(3, () => Secp256k1Signer.random()); - const attestations = await Promise.all( + const atts = await Promise.all( signers.map(signer => makeBlockAttestation({ signer, @@ -310,6 +311,8 @@ export async function randomPublishedL2Block( }), ), ); - const signatures = attestations.map(attestation => attestation.signature); - return { block, l1, signatures }; + const attestations = atts.map( + (attestation, i) => new CommitteeAttestation(signers[i].address, attestation.signature), + ); + return { block, l1, attestations }; } diff --git a/yarn-project/txe/src/state_machine/index.ts b/yarn-project/txe/src/state_machine/index.ts index 363f4a2f8ace..774ebf1760a9 100644 --- a/yarn-project/txe/src/state_machine/index.ts +++ b/yarn-project/txe/src/state_machine/index.ts @@ -62,7 +62,7 @@ export class TXEStateMachine { blockNumber: block.header.globalVariables.blockNumber.toBigInt(), timestamp: block.header.globalVariables.blockNumber.toBigInt(), }, - signatures: [], + attestations: [], }, ]), this.syncDataProvider.setHeader(block.header),