diff --git a/.github/workflows/deploy-fisherman-network.yaml b/.github/workflows/deploy-fisherman-network.yaml index aa4f001a0535..258d473dae98 100644 --- a/.github/workflows/deploy-fisherman-network.yaml +++ b/.github/workflows/deploy-fisherman-network.yaml @@ -123,10 +123,10 @@ jobs: echo "USE_NETWORK_CONFIG=true" >> $GITHUB_ENV elif [[ "${{ inputs.l1_network }}" == "mainnet" ]]; then echo "NETWORK=mainnet" >> $GITHUB_ENV - echo "NAMESPACE=ignition-fisherman-mainnet" >> $GITHUB_ENV + echo "NAMESPACE=mainnet" >> $GITHUB_ENV echo "ETHEREUM_CHAIN_ID=1" >> $GITHUB_ENV echo "L1_NETWORK=mainnet" >> $GITHUB_ENV - echo "SNAPSHOT_BUCKET_DIRECTORY=ignition-mainnet" >> $GITHUB_ENV + echo "SNAPSHOT_BUCKET_DIRECTORY=mainnet" >> $GITHUB_ENV echo "USE_NETWORK_CONFIG=true" >> $GITHUB_ENV fi diff --git a/.test_patterns.yml b/.test_patterns.yml index 73ac840c65ae..75d3bbad38d2 100644 --- a/.test_patterns.yml +++ b/.test_patterns.yml @@ -78,6 +78,11 @@ tests: error_regex: "ultra_circuit_builder.test.cpp:631: Failure" owners: - *luke + # http://ci.aztec-labs.com/1593f7c89e22b51b + - regex: stdlib_primitives_tests stdlibBiggroupSecp256k1/1.WnafSecp256k1StaggerOutOfRangeFails + error_regex: "biggroup_nafs: stagger fragment is not in range" + owners: + - *luke # noir # Something to do with how I run the tests now. Think these are fine in nextest. 
diff --git a/cspell.json b/cspell.json index b53ecf78cbb2..ab3d515bf4f1 100644 --- a/cspell.json +++ b/cspell.json @@ -187,6 +187,7 @@ "merkleizing", "messagebox", "mimc", + "mintable", "mktemp", "mload", "mockify", diff --git a/l1-contracts/lib/circuits b/l1-contracts/lib/circuits index 3dceb985b1b4..1d5e440b3edf 160000 --- a/l1-contracts/lib/circuits +++ b/l1-contracts/lib/circuits @@ -1 +1 @@ -Subproject commit 3dceb985b1b4b28d07c4c27147df0e945b482f3e +Subproject commit 1d5e440b3edfc9922f9fd33b96715d27a2f7ad90 diff --git a/l1-contracts/script/StakingAssetHandler.s.sol b/l1-contracts/script/StakingAssetHandler.s.sol index 98bb41433343..dcfd4e76ffc8 100644 --- a/l1-contracts/script/StakingAssetHandler.s.sol +++ b/l1-contracts/script/StakingAssetHandler.s.sol @@ -30,7 +30,7 @@ contract StakingAssetHandlerScript is Test { bytes32 public constant DEPOSIT_MERKLE_ROOT = bytes32(0); ZKPassportVerifier internal constant zkPassportVerifier = - ZKPassportVerifier(0xf7480fd0A9289c062C52532f11D31e0b7A30ABe3); + ZKPassportVerifier(0x3101Bad9eA5fACadA5554844a1a88F7Fe48D4DE0); TestERC20 public constant stakingAsset = TestERC20(0x6732CEDafCBF85Afa9B5C83f0385967840BBCe47); IRegistry public constant registry = IRegistry(0xc2F24280F5c7F4897370dFDEb30f79Ded14f1c81); diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index f53d385ae6b1..a34eed14a897 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -85,7 +85,6 @@ struct RollupConfig { uint32 version; IERC20 feeAsset; IFeeJuicePortal feeAssetPortal; - IRewardDistributor rewardDistributor; IVerifier epochProofVerifier; IInbox inbox; IOutbox outbox; diff --git a/l1-contracts/src/core/libraries/rollup/BlobLib.sol b/l1-contracts/src/core/libraries/rollup/BlobLib.sol index 9f2e882f50e0..838e7076ea0f 100644 --- a/l1-contracts/src/core/libraries/rollup/BlobLib.sol +++ b/l1-contracts/src/core/libraries/rollup/BlobLib.sol @@ 
-22,7 +22,8 @@ import {Vm} from "forge-std/Vm.sol"; * The VM_ADDRESS (0x7109709ECfa91a80626fF3989D68f67F5b1DD12D) is a special address used to detect * when the contract is running in a Foundry test environment. This address is derived from * keccak256("hevm cheat code") and corresponds to Foundry's VM contract that provides testing utilities. - * When VM_ADDRESS.code.length > 0, it indicates we're in a test environment, allowing the library to: + * When block.chainid == 31337 && VM_ADDRESS.code.length > 0, it indicates we're in a test environment, + * allowing the library to: * - Use Foundry's getBlobBaseFee() cheatcode instead of block.blobbasefee * - Use Foundry's getBlobhashes() cheatcode instead of the blobhash() opcode * This enables comprehensive testing of blob functionality without requiring actual blob transactions. @@ -47,7 +48,7 @@ library BlobLib { * @return uint256 - The blob base fee */ function getBlobBaseFee() internal view returns (uint256) { - if (VM_ADDRESS.code.length > 0) { + if (block.chainid == 31_337 && VM_ADDRESS.code.length > 0) { return Vm(VM_ADDRESS).getBlobBaseFee(); } return block.blobbasefee; @@ -62,7 +63,7 @@ library BlobLib { * @return blobHash - The blob hash */ function getBlobHash(uint256 _index) internal view returns (bytes32 blobHash) { - if (VM_ADDRESS.code.length > 0) { + if (block.chainid == 31_337 && VM_ADDRESS.code.length > 0) { // We know that this one is ABHORRENT. But it should not exists, and only will // be hit in testing. 
bytes32[] memory blobHashes = Vm(VM_ADDRESS).getBlobhashes(); diff --git a/l1-contracts/src/core/messagebridge/Inbox.sol b/l1-contracts/src/core/messagebridge/Inbox.sol index 34b77688bb0f..30ccdc7703b7 100644 --- a/l1-contracts/src/core/messagebridge/Inbox.sol +++ b/l1-contracts/src/core/messagebridge/Inbox.sol @@ -114,7 +114,7 @@ contract Inbox is IInbox { bytes16 updatedRollingHash = bytes16(keccak256(abi.encodePacked(rollingHash, leaf))); state = InboxState({ - rollingHash: bytes16(updatedRollingHash), + rollingHash: updatedRollingHash, totalMessagesInserted: totalMessagesInserted + 1, inProgress: inProgress }); diff --git a/l1-contracts/src/core/reward-boost/RewardBooster.sol b/l1-contracts/src/core/reward-boost/RewardBooster.sol index f498333b1c72..e20ce3e14d52 100644 --- a/l1-contracts/src/core/reward-boost/RewardBooster.sol +++ b/l1-contracts/src/core/reward-boost/RewardBooster.sol @@ -115,7 +115,7 @@ contract RewardBooster is IBooster { } function _toShares(uint256 _value) internal view returns (uint256) { - if (_value > CONFIG_MAX_SCORE) { + if (_value >= CONFIG_MAX_SCORE) { return CONFIG_K; } uint256 t = (CONFIG_MAX_SCORE - _value); diff --git a/l1-contracts/src/governance/CoinIssuer.sol b/l1-contracts/src/governance/CoinIssuer.sol index b4767dcb07ea..a31edfbfb2c2 100644 --- a/l1-contracts/src/governance/CoinIssuer.sol +++ b/l1-contracts/src/governance/CoinIssuer.sol @@ -11,17 +11,49 @@ import {Ownable2Step} from "@oz/access/Ownable2Step.sol"; /** * @title CoinIssuer * @author Aztec Labs - * @notice A contract that allows minting of coins at a maximum fixed rate + * @notice A contract that allows minting of coins at a maximum percentage rate per year using discrete annual budgets + * + * This contract uses a discrete annual budget model: + * - Years are fixed periods from deployment: + * - year 0 = [deployment, deployment + 365d) + * - year 1 = [deployment + 365d, deployment + (2) * 365d) + * - ... 
+ * - year n = [deployment + 365d * n, deployment + (n + 1) * 365d) + * - Each year's budget is calculated at the start of that year based on the actual supply at that moment + * - Budget = totalSupply() × NOMINAL_ANNUAL_PERCENTAGE_CAP / 1e18 + * - Unused budget from year N is LOST when year N+1 begins (use-it-or-lose-it) + * + * Rate semantics: If the full budget is minted every year, the effective annual inflation rate equals + * NOMINAL_ANNUAL_PERCENTAGE_CAP. For example, setting the rate to 0.10e18 (10%) and fully minting each + * year will result in supply growing by exactly 10% annually: supply(year N) = supply(year 0) × (1.10)^N + * + * Partial minting: If less than the full budget is minted in year N, the remaining allowance is lost + * at the year N→N+1 boundary. Year N+1's budget is calculated based on the actual supply at the start + * of year N+1, which reflects only what was actually minted. + * + * @dev The NOMINAL_ANNUAL_PERCENTAGE_CAP is in e18 precision where 1e18 = 100% + * + * @dev The token MUST have a non-zero initial supply at deployment, or an alternative way to mint the token. */ contract CoinIssuer is ICoinIssuer, Ownable { IMintableERC20 public immutable ASSET; - uint256 public immutable RATE; - uint256 public timeOfLastMint; + uint256 public immutable NOMINAL_ANNUAL_PERCENTAGE_CAP; + uint256 public immutable DEPLOYMENT_TIME; - constructor(IMintableERC20 _asset, uint256 _rate, address _owner) Ownable(_owner) { + // Note that the state variables below are "cached": + // they are only updated when minting after a year boundary. 
+ uint256 public cachedBudgetYear; + uint256 public cachedBudget; + + constructor(IMintableERC20 _asset, uint256 _annualPercentage, address _owner) Ownable(_owner) { ASSET = _asset; - RATE = _rate; - timeOfLastMint = block.timestamp; + NOMINAL_ANNUAL_PERCENTAGE_CAP = _annualPercentage; + DEPLOYMENT_TIME = block.timestamp; + + cachedBudgetYear = 0; + cachedBudget = _getNewBudget(); + + emit BudgetReset(0, cachedBudget); } function acceptTokenOwnership() external override(ICoinIssuer) onlyOwner { @@ -29,26 +61,70 @@ contract CoinIssuer is ICoinIssuer, Ownable { } /** - * @notice Mint tokens up to the `mintAvailable` limit - * Beware that the mintAvailable will be reset to 0, and not just - * reduced by the amount minted. + * @notice Mint `_amount` tokens to `_to` + * + * @dev The `_amount` must be within the `cachedBudget` * * @param _to - The address to receive the funds * @param _amount - The amount to mint */ function mint(address _to, uint256 _amount) external override(ICoinIssuer) onlyOwner { - uint256 maxMint = mintAvailable(); - require(_amount <= maxMint, Errors.CoinIssuer__InsufficientMintAvailable(maxMint, _amount)); - timeOfLastMint = block.timestamp; + // Update state if we've crossed into a new year (will reset budget and forfeit unused amount) + _updateBudgetIfNeeded(); + + require(_amount <= cachedBudget, Errors.CoinIssuer__InsufficientMintAvailable(cachedBudget, _amount)); + cachedBudget -= _amount; + ASSET.mint(_to, _amount); } /** - * @notice The amount of funds that is available for "minting" + * @notice The amount of funds that is available for "minting" in the current year + * If we've crossed into a new year since the last mint, returns the fresh budget + * for the new year based on current supply. 
* * @return The amount mintable */ function mintAvailable() public view override(ICoinIssuer) returns (uint256) { - return RATE * (block.timestamp - timeOfLastMint); + uint256 currentYear = _yearSinceGenesis(); + + // Until the budget is stale, return the cached budget + if (cachedBudgetYear >= currentYear) { + return cachedBudget; + } + + // Crossed into new year(s): compute fresh budget + return _getNewBudget(); + } + + /** + * @notice Internal function to update year and budget when crossing year boundaries + * + * @dev If multiple years have passed without minting, jumps directly to current year + * and all intermediate years' budgets are lost + */ + function _updateBudgetIfNeeded() private { + uint256 currentYear = _yearSinceGenesis(); + // If the budget is for the past, update the budget. + if (cachedBudgetYear < currentYear) { + cachedBudgetYear = currentYear; + cachedBudget = _getNewBudget(); + + emit BudgetReset(currentYear, cachedBudget); + } + } + + /** + * @notice Internal function to compute the current year since genesis + */ + function _yearSinceGenesis() private view returns (uint256) { + return (block.timestamp - DEPLOYMENT_TIME) / 365 days; + } + + /** + * @notice Internal function to compute a fresh budget + */ + function _getNewBudget() private view returns (uint256) { + return ASSET.totalSupply() * NOMINAL_ANNUAL_PERCENTAGE_CAP / 1e18; } } diff --git a/l1-contracts/src/governance/interfaces/ICoinIssuer.sol b/l1-contracts/src/governance/interfaces/ICoinIssuer.sol index b770c11352d6..229cc139e331 100644 --- a/l1-contracts/src/governance/interfaces/ICoinIssuer.sol +++ b/l1-contracts/src/governance/interfaces/ICoinIssuer.sol @@ -3,6 +3,8 @@ pragma solidity >=0.8.27; interface ICoinIssuer { + event BudgetReset(uint256 indexed newYear, uint256 newBudget); + function mint(address _to, uint256 _amount) external; function acceptTokenOwnership() external; function mintAvailable() external view returns (uint256); diff --git 
a/l1-contracts/src/mock/StakingAssetHandler.sol b/l1-contracts/src/mock/StakingAssetHandler.sol index 3e7219dafb14..6475795a4baa 100644 --- a/l1-contracts/src/mock/StakingAssetHandler.sol +++ b/l1-contracts/src/mock/StakingAssetHandler.sol @@ -7,7 +7,9 @@ import {IMintableERC20} from "@aztec/shared/interfaces/IMintableERC20.sol"; import {G1Point, G2Point} from "@aztec/shared/libraries/BN254Lib.sol"; import {Ownable} from "@oz/access/Ownable.sol"; import {MerkleProof} from "@oz/utils/cryptography/MerkleProof.sol"; -import {ZKPassportVerifier, ProofVerificationParams, BoundData} from "@zkpassport/ZKPassportVerifier.sol"; +import { + ZKPassportVerifier, ProofVerificationParams, BoundData, OS, FaceMatchMode +} from "@zkpassport/ZKPassportVerifier.sol"; /** * @title StakingAssetHandler @@ -56,6 +58,7 @@ interface IStakingAssetHandler { error InvalidAge(); error InvalidCountry(); error InvalidValidityPeriod(); + error InvalidFaceMatch(); error ExtraDiscloseDataNonZero(); error SybilDetected(bytes32 _nullifier); error AttesterDoesNotExist(address _attester); @@ -120,6 +123,12 @@ contract StakingAssetHandler is IStakingAssetHandler, Ownable { string internal constant IRN = "IRN"; string internal constant CUB = "CUB"; + // Minimum age + uint8 public constant MIN_AGE = 18; + + // Validity period in seconds + uint256 public constant VALIDITY_PERIOD = 7 days; + IMintableERC20 public immutable STAKING_ASSET; IRegistry public immutable REGISTRY; @@ -143,9 +152,6 @@ contract StakingAssetHandler is IStakingAssetHandler, Ownable { // ZKPassport constraints string public validDomain; string public validScope; - uint256 public validValidityPeriodInSeconds = 7 days; - uint8 public minAge = 18; - string[] internal excludedCountries; constructor(StakingAssetHandlerArgs memory _args) Ownable(_args.owner) { require(_args.depositsPerMint > 0, CannotMintZeroAmount()); @@ -181,12 +187,6 @@ contract StakingAssetHandler is IStakingAssetHandler, Ownable { validDomain = _args.domain; validScope 
= _args.scope; - excludedCountries = new string[](4); - excludedCountries[0] = CUB; - excludedCountries[1] = IRN; - excludedCountries[2] = PKR; - excludedCountries[3] = UKR; - skipBindCheck = _args.skipBindCheck; skipMerkleCheck = _args.skipMerkleCheck; } @@ -323,10 +323,11 @@ contract StakingAssetHandler is IStakingAssetHandler, Ownable { function _validatePassportProof(address _attester, ProofVerificationParams calldata _params) internal { // Must NOT be using dev mode - https://docs.zkpassport.id/getting-started/dev-mode // If active, nullifiers will end up being zero, but it is user provided input, so we are sanity checking it - require(_params.devMode == false, InvalidProof()); + require(_params.serviceConfig.devMode == false, InvalidProof()); - require(keccak256(bytes(_params.domain)) == keccak256(bytes(validDomain)), InvalidDomain()); - require(keccak256(bytes(_params.scope)) == keccak256(bytes(validScope)), InvalidScope()); + require(keccak256(bytes(_params.serviceConfig.domain)) == keccak256(bytes(validDomain)), InvalidDomain()); + require(keccak256(bytes(_params.serviceConfig.scope)) == keccak256(bytes(validScope)), InvalidScope()); + require(_params.serviceConfig.validityPeriodInSeconds == VALIDITY_PERIOD, InvalidValidityPeriod()); (bool verified, bytes32 nullifier) = zkPassportVerifier.verifyProof(_params); @@ -334,7 +335,7 @@ contract StakingAssetHandler is IStakingAssetHandler, Ownable { require(!nullifiers[nullifier], SybilDetected(nullifier)); if (!skipBindCheck) { - BoundData memory boundData = zkPassportVerifier.getBoundData(_params); + BoundData memory boundData = zkPassportVerifier.getBoundData(_params.commitments); // Make sure the bound user address is the same as the _attester require(boundData.senderAddress == _attester, InvalidBoundAddress(boundData.senderAddress, _attester)); @@ -343,19 +344,26 @@ contract StakingAssetHandler is IStakingAssetHandler, Ownable { // Make sure the custom data is empty require(bytes(boundData.customData).length 
== 0, ExtraDiscloseDataNonZero()); - // Validity period check - require(validValidityPeriodInSeconds == _params.validityPeriodInSeconds, InvalidValidityPeriod()); - // Age check - bool isAgeValid = zkPassportVerifier.isAgeAboveOrEqual(minAge, _params); + bool isAgeValid = zkPassportVerifier.isAgeAboveOrEqual(MIN_AGE, _params.commitments, _params.serviceConfig); require(isAgeValid, InvalidAge()); // Country exclusion check - bool isCountryValid = zkPassportVerifier.isNationalityOut(excludedCountries, _params); + string[] memory excludedCountries = new string[](4); + excludedCountries[0] = CUB; + excludedCountries[1] = IRN; + excludedCountries[2] = PKR; + excludedCountries[3] = UKR; + bool isCountryValid = zkPassportVerifier.isNationalityOut(excludedCountries, _params.commitments); require(isCountryValid, InvalidCountry()); // Sanctions check - zkPassportVerifier.enforceSanctionsRoot(_params); + zkPassportVerifier.enforceSanctionsRoot(_params.commitments); + + // Face match check + bool isFaceMatchValid = + zkPassportVerifier.isFaceMatchVerified(FaceMatchMode.STRICT, OS.ANY, _params.commitments, _params.serviceConfig); + require(isFaceMatchValid, InvalidFaceMatch()); } // Set nullifier to consumed diff --git a/l1-contracts/test/DateGatedRelayer.t.sol b/l1-contracts/test/DateGatedRelayer.t.sol index 5aa179ac9a98..0ca5771fba9f 100644 --- a/l1-contracts/test/DateGatedRelayer.t.sol +++ b/l1-contracts/test/DateGatedRelayer.t.sol @@ -35,7 +35,8 @@ contract DateGatedRelayerTest is Test { uint256 gatedUntil = bound(_gatedUntil, block.timestamp + 1, type(uint32).max); TestERC20 testERC20 = new TestERC20("test", "TEST", address(this)); - CoinIssuer coinIssuer = new CoinIssuer(testERC20, 100, address(this)); + testERC20.mint(address(this), 1e18); + CoinIssuer coinIssuer = new CoinIssuer(testERC20, 100e18, address(this)); testERC20.transferOwnership(address(coinIssuer)); coinIssuer.acceptTokenOwnership(); @@ -45,11 +46,15 @@ contract DateGatedRelayerTest is Test { uint256 warp = 
bound(_warp, gatedUntil, type(uint32).max); vm.expectRevert(); - coinIssuer.mint(address(this), 100); + coinIssuer.mint(address(this), 1); vm.warp(warp); - dateGatedRelayer.relay(address(coinIssuer), abi.encodeWithSelector(CoinIssuer.mint.selector, address(this), 100)); + uint256 mintAvailable = coinIssuer.mintAvailable(); + dateGatedRelayer.relay( + address(coinIssuer), abi.encodeWithSelector(CoinIssuer.mint.selector, address(this), mintAvailable) + ); - assertEq(testERC20.balanceOf(address(this)), 100); + assertEq(testERC20.balanceOf(address(this)), mintAvailable + 1e18, "balanceOf"); + assertEq(testERC20.totalSupply(), mintAvailable + 1e18, "totalSupply"); } } diff --git a/l1-contracts/test/governance/coin-issuer/Base.t.sol b/l1-contracts/test/governance/coin-issuer/Base.t.sol index 2acfb215bc87..5d99e3a1c186 100644 --- a/l1-contracts/test/governance/coin-issuer/Base.t.sol +++ b/l1-contracts/test/governance/coin-issuer/Base.t.sol @@ -13,9 +13,10 @@ contract CoinIssuerBase is Test { CoinIssuer internal nom; - function _deploy(uint256 _rate) internal { + function _deploy(uint256 _rate, uint256 _initialSupply) internal { TestERC20 testERC20 = new TestERC20("test", "TEST", address(this)); token = IMintableERC20(address(testERC20)); + token.mint(address(this), _initialSupply); nom = new CoinIssuer(token, _rate, address(this)); testERC20.transferOwnership(address(nom)); nom.acceptTokenOwnership(); diff --git a/l1-contracts/test/governance/coin-issuer/acceptTokenOwnership.t.sol b/l1-contracts/test/governance/coin-issuer/acceptTokenOwnership.t.sol new file mode 100644 index 000000000000..36f2c31651b6 --- /dev/null +++ b/l1-contracts/test/governance/coin-issuer/acceptTokenOwnership.t.sol @@ -0,0 +1,75 @@ +// SPDX-License-Identifier: UNLICENSED +pragma solidity >=0.8.27; + +import {Ownable} from "@oz/access/Ownable.sol"; +import {Ownable2Step} from "@oz/access/Ownable2Step.sol"; +import {CoinIssuerBase} from "./Base.t.sol"; +import {TestERC20} from 
"@aztec/mock/TestERC20.sol"; +import {IMintableERC20} from "@aztec/shared/interfaces/IMintableERC20.sol"; +import {CoinIssuer} from "@aztec/governance/CoinIssuer.sol"; + +contract AcceptTokenOwnershipTest is CoinIssuerBase { + function setUp() public { + _deploy(1e18, 1_000_000); + } + + function test_GivenCallerIsNotOwner(address _caller) external { + // it reverts + vm.assume(_caller != address(this)); + vm.expectRevert(abi.encodeWithSelector(Ownable.OwnableUnauthorizedAccount.selector, _caller)); + vm.prank(_caller); + nom.acceptTokenOwnership(); + } + + function test_GivenCallerIsOwnerButNoOwnershipTransferPending() external { + // it reverts because ownership was already accepted in Base setup + // Attempting to accept again should fail + vm.expectRevert(abi.encodeWithSelector(Ownable.OwnableUnauthorizedAccount.selector, address(nom))); + nom.acceptTokenOwnership(); + } + + function test_GivenCallerIsOwnerAndOwnershipTransferPending() external { + // it successfully accepts ownership of the token + // We need to test the flow from a fresh deployment where ownership hasn't been accepted + + // Create token and CoinIssuer but don't call acceptTokenOwnership + TestERC20 testERC20 = new TestERC20("test", "TEST", address(this)); + IMintableERC20 newToken = IMintableERC20(address(testERC20)); + newToken.mint(address(this), 1_000_000); + CoinIssuer newNom = new CoinIssuer(newToken, 1e18, address(this)); + + // Transfer ownership but don't accept yet + testERC20.transferOwnership(address(newNom)); + + // Verify pendingOwner is set but owner hasn't changed + assertEq(Ownable(address(newToken)).owner(), address(this)); + assertEq(Ownable2Step(address(newToken)).pendingOwner(), address(newNom)); + + // Accept ownership through CoinIssuer + newNom.acceptTokenOwnership(); + + // Verify ownership was transferred + assertEq(Ownable(address(newToken)).owner(), address(newNom)); + assertEq(Ownable2Step(address(newToken)).pendingOwner(), address(0)); + } + + function 
test_GivenMultipleAcceptanceAttempts() external { + // it should fail on second attempt since ownership already accepted + // Create token and CoinIssuer + TestERC20 testERC20 = new TestERC20("test", "TEST", address(this)); + IMintableERC20 newToken = IMintableERC20(address(testERC20)); + newToken.mint(address(this), 1_000_000); + CoinIssuer newNom = new CoinIssuer(newToken, 1e18, address(this)); + + // Transfer ownership + testERC20.transferOwnership(address(newNom)); + + // First acceptance should succeed + newNom.acceptTokenOwnership(); + assertEq(Ownable(address(newToken)).owner(), address(newNom)); + + // Second acceptance should fail (no pending ownership) + vm.expectRevert(abi.encodeWithSelector(Ownable.OwnableUnauthorizedAccount.selector, address(newNom))); + newNom.acceptTokenOwnership(); + } +} diff --git a/l1-contracts/test/governance/coin-issuer/acceptTokenOwnership.tree b/l1-contracts/test/governance/coin-issuer/acceptTokenOwnership.tree new file mode 100644 index 000000000000..3369cb0640c8 --- /dev/null +++ b/l1-contracts/test/governance/coin-issuer/acceptTokenOwnership.tree @@ -0,0 +1,11 @@ +AcceptTokenOwnershipTest +├── given caller is not owner +│ └── it reverts +├── given caller is owner but no ownership transfer pending +│ └── it reverts because ownership was already accepted +├── given caller is owner and ownership transfer pending +│ ├── it successfully accepts ownership of the token +│ ├── it updates the token owner to the CoinIssuer +│ └── it clears the pendingOwner +└── given multiple acceptance attempts + └── it should fail on second attempt since ownership already accepted diff --git a/l1-contracts/test/governance/coin-issuer/mint.t.sol b/l1-contracts/test/governance/coin-issuer/mint.t.sol index 7602196c93c6..c8d05a650745 100644 --- a/l1-contracts/test/governance/coin-issuer/mint.t.sol +++ b/l1-contracts/test/governance/coin-issuer/mint.t.sol @@ -5,21 +5,19 @@ import {Ownable} from "@oz/access/Ownable.sol"; import {IERC20} from 
"@oz/token/ERC20/IERC20.sol"; import {Errors} from "@aztec/governance/libraries/Errors.sol"; import {CoinIssuerBase} from "./Base.t.sol"; +import {ICoinIssuer} from "@aztec/governance/interfaces/ICoinIssuer.sol"; contract MintTest is CoinIssuerBase { - uint256 internal constant RATE = 1e18; - uint256 internal maxMint; + uint256 internal constant INITIAL_SUPPLY = 1_000_000; - function setUp() public { - _deploy(RATE); - vm.warp(block.timestamp + 1000); - - maxMint = nom.mintAvailable(); - - assertGt(maxMint, 0); + modifier withFuzzedRate(uint256 _rate) { + uint256 rate = bound(_rate, 0.01e18, 10e18); // 1% to 1000% + _deploy(rate, INITIAL_SUPPLY); + assertGt(nom.mintAvailable(), 0); + _; } - function test_GivenCallerIsNotOwner(address _caller) external { + function test_WhenCallerIsNotOwner(uint256 _rate, address _caller) external withFuzzedRate(_rate) { // it reverts vm.assume(_caller != address(this)); vm.expectRevert(abi.encodeWithSelector(Ownable.OwnableUnauthorizedAccount.selector, _caller)); @@ -27,32 +25,212 @@ contract MintTest is CoinIssuerBase { nom.mint(address(0xdead), 1); } - modifier givenCallerIsOwner() { + modifier whenCallerIsOwner() { _; } - function test_GivenAmountLargerThanMaxMint(uint256 _amount) external givenCallerIsOwner { + function test_WhenAmountExceedsMaxMint(uint256 _rate, uint256 _amount) + external + withFuzzedRate(_rate) + whenCallerIsOwner + { // it reverts - uint256 amount = bound(_amount, maxMint + 1, type(uint256).max); - vm.expectRevert(abi.encodeWithSelector(Errors.CoinIssuer__InsufficientMintAvailable.selector, maxMint, amount)); - nom.mint(address(0xdead), amount); + uint256 maxAvailable = nom.mintAvailable(); + vm.assume(maxAvailable < type(uint256).max); + uint256 excessAmount = bound(_amount, maxAvailable + 1, type(uint256).max); + vm.expectRevert( + abi.encodeWithSelector(Errors.CoinIssuer__InsufficientMintAvailable.selector, maxAvailable, excessAmount) + ); + nom.mint(address(0xdead), excessAmount); + } + + function 
test_WhenMintingToZeroAddress(uint256 _rate) external withFuzzedRate(_rate) whenCallerIsOwner { + // it reverts + uint256 maxAvailable = nom.mintAvailable(); + vm.expectRevert(); + nom.mint(address(0), maxAvailable); + } + + function test_WhenMintingZeroAmount(uint256 _rate) external withFuzzedRate(_rate) whenCallerIsOwner { + // it succeeds with no state changes + uint256 balanceBefore = token.balanceOf(address(0xdead)); + uint256 totalSupplyBefore = token.totalSupply(); + nom.mint(address(0xdead), 0); + assertEq(token.balanceOf(address(0xdead)), balanceBefore); + assertEq(token.totalSupply(), totalSupplyBefore); } - function test_GivenAmountLessThanOrEqualMaxMint(uint256 _amount) external givenCallerIsOwner { - // it updates timeOfLastMint - // it mints amount - // it emits a {Transfer} event - // it will return 0 for mintAvailable in same block - uint256 amount = bound(_amount, 1, maxMint); - assertGt(amount, 0); + function test_WhenMintingNonZeroAmount(uint256 _rate, uint256 _amount) + external + withFuzzedRate(_rate) + whenCallerIsOwner + { + // it mints correct amount + // it emits a Transfer event + // it preserves unused allowance + uint256 maxAvailable = nom.mintAvailable(); + uint256 amount = bound(_amount, 1, maxAvailable); uint256 balanceBefore = token.balanceOf(address(0xdead)); + uint256 availableBefore = nom.mintAvailable(); vm.expectEmit(true, true, true, false, address(token)); emit IERC20.Transfer(address(0), address(0xdead), amount); nom.mint(address(0xdead), amount); assertEq(token.balanceOf(address(0xdead)), balanceBefore + amount); - assertEq(nom.mintAvailable(), 0); - assertEq(nom.timeOfLastMint(), block.timestamp); + assertEq(nom.mintAvailable(), availableBefore - amount); + } + + function test_WhenMultipleMintsWithinSameYear( + uint256 _rate, + uint256 _numMints, + uint256[16] calldata _mintFractions, + bool _lastMintIsFull + ) external withFuzzedRate(_rate) whenCallerIsOwner { + // it draws from same annual budget + uint256 rate = 
nom.NOMINAL_ANNUAL_PERCENTAGE_CAP(); + uint256 deploymentTime = nom.DEPLOYMENT_TIME(); + uint256 totalMinted = 0; + + // Bound the number of mints between 1 and 16 + uint256 numMints = bound(_numMints, 1, 16); + + // Calculate the expected total budget for year 0 + uint256 expectedBudget = (INITIAL_SUPPLY * rate) / 1e18; + + // Perform sequential mints with fuzzed fractions + for (uint256 i = 0; i < numMints; i++) { + // Warp to a time within year 0, distributed evenly but still all in year 0 + // Using 364 days to ensure we stay within year 0 (before year 1 starts) + uint256 timeOffset = ((i + 1) * 364 days) / (numMints + 1); + vm.warp(deploymentTime + timeOffset); + + uint256 available = nom.mintAvailable(); + + // On the last mint, mint everything remaining + uint256 mintAmount; + if (i == numMints - 1 && _lastMintIsFull) { + mintAmount = available; + } else { + // Mint a random fraction of available (1-100% bounded to ensure progress) + // Bound fraction between 1% and 100% of available + uint256 fraction = bound(_mintFractions[i], 0.01e18, 1e18); + mintAmount = (available * fraction) / 1e18; + + // Ensure we mint at least 1 if available > 0 + if (available > 0 && mintAmount == 0) { + mintAmount = 1; + } + } + + if (mintAmount > 0) { + nom.mint(address(0xdead), mintAmount); + totalMinted += mintAmount; + } + } + + if (_lastMintIsFull) { + assertEq(totalMinted, expectedBudget, "Total minted should equal year 0 budget"); + assertEq(nom.mintAvailable(), 0, "No budget should remain in year 0"); + } else { + assertLe(totalMinted, expectedBudget, "Total minted should be less than or equal to year 0 budget"); + assertGe(nom.mintAvailable(), 0, "Budget should be greater than or equal to 0 in year 0"); + } + assertEq(token.balanceOf(address(0xdead)), totalMinted, "Balance should match total minted"); + } + + function test_WhenCrossingYearBoundaries(uint256 _rate, uint256 _year0MintFraction) + external + withFuzzedRate(_rate) + whenCallerIsOwner + { + // it demonstrates 
compounding + // it shows unused budget is LOST when crossing years + // it verifies state tracking + uint256 rate = nom.NOMINAL_ANNUAL_PERCENTAGE_CAP(); + uint256 deploymentTime = nom.DEPLOYMENT_TIME(); + uint256 initialTotalSupply = token.totalSupply(); + uint256 year0MintFraction = bound(_year0MintFraction, 1, 100); + + // Year 0: Mint a random fraction + uint256 year0Budget = nom.mintAvailable(); + uint256 expectedYear0Budget = (INITIAL_SUPPLY * rate) / 1e18; + assertEq(year0Budget, expectedYear0Budget); + assertEq(nom.cachedBudget(), expectedYear0Budget); + + uint256 year0Minted = (year0Budget * year0MintFraction) / 100; + if (year0Minted > 0) { + nom.mint(address(0xdead), year0Minted); + } + + assertEq(token.totalSupply(), initialTotalSupply + year0Minted); + assertEq(nom.mintAvailable(), year0Budget - year0Minted); + assertEq(nom.cachedBudget(), year0Budget - year0Minted); + + // Cross into year 1 + vm.warp(deploymentTime + 365 days); + + // Year 1 budget based on current supply (compounding), not year 0 remainder + uint256 currentSupply = token.totalSupply(); + uint256 year1Budget = nom.mintAvailable(); + uint256 expectedYear1Budget = (currentSupply * rate) / 1e18; + assertEq(year1Budget, expectedYear1Budget); + + if (year0Minted > 0) { + assertGt(year1Budget, year0Budget); // Compounding effect + assertEq(currentSupply, INITIAL_SUPPLY + year0Minted); + } else { + assertEq(year1Budget, year0Budget); + } + + // Mint in year 1 to update state + vm.expectEmit(true, true, true, false, address(nom)); + emit ICoinIssuer.BudgetReset(1, expectedYear1Budget); + nom.mint(address(0xdead), 1); + assertEq(nom.cachedBudgetYear(), 1); + assertEq(nom.mintAvailable(), expectedYear1Budget - 1); + assertEq(nom.cachedBudget(), expectedYear1Budget - 1); + + // Jump to year 2 + vm.warp(deploymentTime + 2 * 365 days); + uint256 year2Budget = nom.mintAvailable(); + uint256 supplyAtYear2 = token.totalSupply(); + assertEq(year2Budget, (supplyAtYear2 * rate) / 1e18); + 
assertGt(year2Budget, expectedYear0Budget); // Cumulative compounding + } + + function test_WhenSkippingYears(uint256 _rate, uint256 _yearsToSkip) external withFuzzedRate(_rate) whenCallerIsOwner { + // it shows that skipping years loses their budgets + uint256 rate = nom.NOMINAL_ANNUAL_PERCENTAGE_CAP(); + uint256 deploymentTime = nom.DEPLOYMENT_TIME(); + uint256 yearsToSkip = bound(_yearsToSkip, 1, 10); + + uint256 initialBudget = nom.mintAvailable(); + assertEq(nom.cachedBudgetYear(), 0); + + // Mint half of year 0 budget + nom.mint(address(0xdead), initialBudget / 2); + + // Jump to future year + vm.warp(deploymentTime + yearsToSkip * 365 days); + + // Budget is only for target year, not accumulated + uint256 availableAfterSkip = nom.mintAvailable(); + uint256 currentSupply = token.totalSupply(); + assertEq(availableAfterSkip, (currentSupply * rate) / 1e18); + assertGt(availableAfterSkip, initialBudget); // More due to prior minting + + // Mint triggers year jump + vm.expectEmit(true, true, true, false, address(nom)); + emit ICoinIssuer.BudgetReset(yearsToSkip, (currentSupply * rate) / 1e18); + nom.mint(address(0xdead), 1); + assertEq(nom.cachedBudgetYear(), yearsToSkip); + assertEq(nom.cachedBudget(), (currentSupply * rate) / 1e18 - 1); + + // Skip more years + vm.warp(deploymentTime + (yearsToSkip + 4) * 365 days); + uint256 newSupply = token.totalSupply(); + assertEq(nom.mintAvailable(), (newSupply * rate) / 1e18); + assertGt(nom.mintAvailable(), initialBudget); // More due to prior minting } } diff --git a/l1-contracts/test/governance/coin-issuer/mint.tree b/l1-contracts/test/governance/coin-issuer/mint.tree index 19134b170cdf..fe04250b8935 100644 --- a/l1-contracts/test/governance/coin-issuer/mint.tree +++ b/l1-contracts/test/governance/coin-issuer/mint.tree @@ -1,11 +1,22 @@ MintTest -├── given caller is not owner +├── when caller is not owner │ └── it reverts -└── given caller is owner - ├── given amount larger than maxMint +└── when caller is owner + ├── 
when amount exceeds max mint │ └── it reverts - └── given amount less than or equal maxMint - ├── it updates timeOfLastMint - ├── it mints amount - ├── it emits a {Transfer} event - └── it will return 0 for mintAvailable in same block \ No newline at end of file + ├── when minting to zero address + │ └── it reverts + ├── when minting zero amount + │ └── it succeeds with no state changes + ├── when minting non zero amount + │ ├── it mints correct amount + │ ├── it emits a Transfer event + │ └── it preserves unused allowance + ├── when multiple mints within same year + │ └── it draws from same annual budget + ├── when crossing year boundaries + │ ├── it demonstrates compounding + │ ├── it shows unused budget is LOST when crossing years + │ └── it verifies state tracking + └── when skipping years + └── it shows that skipping years loses their budgets diff --git a/l1-contracts/test/governance/coin-issuer/mintAvailable.t.sol b/l1-contracts/test/governance/coin-issuer/mintAvailable.t.sol index ea09e8630a37..86dee0a3a09a 100644 --- a/l1-contracts/test/governance/coin-issuer/mintAvailable.t.sol +++ b/l1-contracts/test/governance/coin-issuer/mintAvailable.t.sol @@ -2,37 +2,85 @@ pragma solidity >=0.8.27; import {CoinIssuerBase} from "./Base.t.sol"; +import {Math} from "@oz/utils/math/Math.sol"; +import {TestERC20} from "@aztec/mock/TestERC20.sol"; +import {IMintableERC20} from "@aztec/shared/interfaces/IMintableERC20.sol"; +import {CoinIssuer} from "@aztec/governance/CoinIssuer.sol"; +import {Errors} from "@aztec/governance/libraries/Errors.sol"; contract MintAvailableTest is CoinIssuerBase { function test_GivenRateIs0(uint256 _time) external { // it returns 0 - _deploy(0); + _deploy(0, 1_000_000); uint256 timeJump = bound(_time, 0, type(uint64).max - block.timestamp - 1); vm.warp(block.timestamp + timeJump); assertEq(nom.mintAvailable(), 0); } - modifier givenRateIsNot0(uint256 _rate) { - uint256 rate = bound(_rate, 1, type(uint128).max); - _deploy(rate); + modifier 
givenRateIsNot0(uint256 _rate, uint256 _initialSupply) { + uint256 rate = bound(_rate, 0.01e18, 10e18); + uint256 initialSupply = bound(_initialSupply, 100, type(uint128).max); + _deploy(rate, initialSupply); - assertEq(rate, nom.RATE()); + assertEq(rate, nom.NOMINAL_ANNUAL_PERCENTAGE_CAP()); _; } - function test_GivenSameTimeAsDeployment(uint256 _rate) external givenRateIsNot0(_rate) { - // it returns 0 - assertEq(nom.mintAvailable(), 0); + function test_GivenSameTimeAsDeployment(uint256 _rate, uint256 _initialSupply) + external + givenRateIsNot0(_rate, _initialSupply) + { + // it returns full year 0 budget + uint256 currentSupply = token.totalSupply(); + uint256 expected = Math.mulDiv(currentSupply, nom.NOMINAL_ANNUAL_PERCENTAGE_CAP(), 1e18, Math.Rounding.Floor); + assertEq(nom.mintAvailable(), expected); } - function test_GivenAfterDeployment(uint256 _rate, uint256 _time) external givenRateIsNot0(_rate) { - // it returns >0 + function test_GivenAfterDeployment(uint256 _rate, uint256 _initialSupply, uint256 _time) + external + givenRateIsNot0(_rate, _initialSupply) + { + // it returns that year's budget + uint256 currentSupply = token.totalSupply(); + uint256 deploymentTime = nom.DEPLOYMENT_TIME(); - uint256 timeJump = bound(_time, 1, type(uint64).max - block.timestamp - 1); - vm.warp(block.timestamp + timeJump); + uint256 timeJump = bound(_time, 1, 10 * 365 days); + vm.warp(deploymentTime + timeJump); + + uint256 expected = Math.mulDiv(currentSupply, nom.NOMINAL_ANNUAL_PERCENTAGE_CAP(), 1e18, Math.Rounding.Floor); + + assertEq(nom.mintAvailable(), expected); + } + + function test_GivenExactlyOneYearElapsed(uint256 _rate, uint256 _initialSupply) + external + givenRateIsNot0(_rate, _initialSupply) + { + // it returns exactly rate * supply for year 1 + uint256 currentSupply = token.totalSupply(); + uint256 deploymentTime = nom.DEPLOYMENT_TIME(); + + vm.warp(deploymentTime + 365 days); + + uint256 expected = Math.mulDiv(currentSupply, 
nom.NOMINAL_ANNUAL_PERCENTAGE_CAP(), 1e18, Math.Rounding.Floor); + + assertEq(nom.mintAvailable(), expected); + } + + function test_GivenMultipleYearsElapsed(uint256 _years) external { + // it always caps at 1 year maximum + uint256 numYears = bound(_years, 2, 100); + _deploy(1e18, 1_000_000); + uint256 deploymentTime = nom.DEPLOYMENT_TIME(); + + vm.warp(deploymentTime + numYears * 365 days); + + uint256 available = nom.mintAvailable(); + + uint256 expectedOneYear = Math.mulDiv(token.totalSupply(), 1e18, 1e18, Math.Rounding.Floor); - assertGt(nom.mintAvailable(), 0); - assertEq(nom.mintAvailable(), nom.RATE() * timeJump); + assertEq(available, expectedOneYear); + assertEq(available, 1_000_000); } } diff --git a/l1-contracts/test/governance/coin-issuer/mintAvailable.tree b/l1-contracts/test/governance/coin-issuer/mintAvailable.tree index 4bede564c2b2..05a46d644bf4 100644 --- a/l1-contracts/test/governance/coin-issuer/mintAvailable.tree +++ b/l1-contracts/test/governance/coin-issuer/mintAvailable.tree @@ -1,8 +1,12 @@ MintAvailableTest ├── given rate is 0 -│ └── it returns 0 +│ └── it returns 0 └── given rate is not 0 ├── given same time as deployment - │ └── it returns 0 - └── given after deployment - └── it returns >0 \ No newline at end of file + │ └── it returns full year 0 budget + ├── given after deployment + │ └── it returns that year's budget + ├── given exactly one year elapsed + │ └── it returns exactly rate * supply for year 1 + └── given multiple years elapsed + └── it always caps at 1 year maximum diff --git a/l1-contracts/test/staking_asset_handler/addValidator.t.sol b/l1-contracts/test/staking_asset_handler/addValidator.t.sol index a9ba48a5dc2f..9058e05dea9f 100644 --- a/l1-contracts/test/staking_asset_handler/addValidator.t.sol +++ b/l1-contracts/test/staking_asset_handler/addValidator.t.sol @@ -183,9 +183,11 @@ contract AddValidatorTest is StakingAssetHandlerBase { _attester, validMerkleProof, _proof, BN254Lib.g1Zero(), BN254Lib.g2Zero(), 
BN254Lib.g1Zero() ); - uint256 uniqueIdentifierLocation = _proof.publicInputs.length - 1; + uint256 uniqueIdentifierLocation = _proof.proofVerificationData.publicInputs.length - 1; vm.expectRevert( - abi.encodeWithSelector(IStakingAssetHandler.SybilDetected.selector, _proof.publicInputs[uniqueIdentifierLocation]) + abi.encodeWithSelector( + IStakingAssetHandler.SybilDetected.selector, _proof.proofVerificationData.publicInputs[uniqueIdentifierLocation] + ) ); // Call from somebody else vm.prank(_caller); @@ -201,7 +203,7 @@ contract AddValidatorTest is StakingAssetHandlerBase { givenPassportProofIsValid { // it reverts - _proof.devMode = true; + _proof.serviceConfig.devMode = true; vm.assume( _attester != address(0) && _caller != address(this) && _attester != address(this) && _attester != unhinged diff --git a/l1-contracts/test/staking_asset_handler/zkpassport/ZKPassportBase.sol b/l1-contracts/test/staking_asset_handler/zkpassport/ZKPassportBase.sol index dec1e508efbf..a755baab9bea 100644 --- a/l1-contracts/test/staking_asset_handler/zkpassport/ZKPassportBase.sol +++ b/l1-contracts/test/staking_asset_handler/zkpassport/ZKPassportBase.sol @@ -4,10 +4,11 @@ pragma solidity >=0.8.27; import {ZKPassportVerifier, ProofVerificationParams} from "@zkpassport/ZKPassportVerifier.sol"; import {IRootRegistry} from "@zkpassport/IRootRegistry.sol"; -import {HonkVerifier as OuterVerifier7} from "@zkpassport/ultra-honk-verifiers/OuterCount7.sol"; +import {HonkVerifier as OuterVerifier8} from "@zkpassport/ultra-honk-verifiers/OuterCount8.sol"; import {MockRootRegistry} from "./MockRootRegistry.sol"; import {MockZKPassportVerifier} from "@aztec/mock/staking_asset_handler/MockZKPassportVerifier.sol"; import {CommittedInputLen} from "@zkpassport/Constants.sol"; +import {ProofVerificationData, Commitments, ServiceConfig} from "@zkpassport/Types.sol"; import {Test} from "forge-std/Test.sol"; @@ -15,7 +16,7 @@ contract ZKPassportBase is Test { ZKPassportVerifier public zkPassportVerifier; 
MockZKPassportVerifier public mockZKPassportVerifier; - OuterVerifier7 public verifier; + OuterVerifier8 public verifier; IRootRegistry public rootRegistry; ProofVerificationParams internal fakeProof; @@ -23,14 +24,14 @@ contract ZKPassportBase is Test { // Path to the proof file - using files directly in project root // Fixtures copied from within the zk passport subrepo - bytes32 constant VKEY_HASH = 0x2992c925ad8932475d5784bf202b58f1c9d043d6ec04e236a7c761593caea5ce; + bytes32 constant VKEY_HASH = 0x254314c80a8cc3efc785643a0a6aeeba6ae268a45e69a98affd4a4155f01e186; // From fixtures - see lib/circuits/src/solidity/test/SampleContract.t.sol string constant CORRECT_DOMAIN = "zkpassport.id"; string constant CORRECT_SCOPE = "bigproof"; - // Time when the proof was generated - 2025-09-26T15:46:45.835Z - uint256 public PROOF_GENERATION_TIMESTAMP = 1_758_901_597; + // Time when the proof was generated - October 19, 2025 7:29:51Z + uint256 public PROOF_GENERATION_TIMESTAMP = 1_760_906_062; // Using this base contract will make a zkpassport verifier and proof available for testing purposes constructor() { @@ -40,7 +41,7 @@ contract ZKPassportBase is Test { // Deploy wrapper verifier zkPassportVerifier = new ZKPassportVerifier(address(rootRegistry)); // Deploy actual circuit verifier - verifier = new OuterVerifier7(); + verifier = new OuterVerifier8(); // Add to the zk passport verifier bytes32[] memory vkeyHashes = new bytes32[](1); @@ -66,22 +67,22 @@ contract ZKPassportBase is Test { bytes memory committedInputs = loadBytesFromFile("valid_committed_inputs.hex"); // Order of bytes of committed inputs for each disclosure proof - uint256[] memory committedInputCounts = new uint256[](4); + uint256[] memory committedInputCounts = new uint256[](5); committedInputCounts[0] = CommittedInputLen.BIND; committedInputCounts[1] = CommittedInputLen.SANCTIONS; committedInputCounts[2] = CommittedInputLen.EXCL_NATIONALITY; committedInputCounts[3] = CommittedInputLen.COMPARE_AGE; + 
committedInputCounts[4] = CommittedInputLen.FACEMATCH; params = ProofVerificationParams({ - vkeyHash: VKEY_HASH, - proof: proof, - publicInputs: publicInputs, - committedInputs: committedInputs, - committedInputCounts: committedInputCounts, - validityPeriodInSeconds: 7 days, - domain: "zkpassport.id", - scope: "bigproof", - devMode: false + proofVerificationData: ProofVerificationData({vkeyHash: VKEY_HASH, proof: proof, publicInputs: publicInputs}), + commitments: Commitments({committedInputs: committedInputs, committedInputCounts: committedInputCounts}), + serviceConfig: ServiceConfig({ + validityPeriodInSeconds: 7 days, + domain: CORRECT_DOMAIN, + scope: CORRECT_SCOPE, + devMode: false + }) }); } @@ -102,15 +103,14 @@ contract ZKPassportBase is Test { committedInputCounts[7] = 25; params = ProofVerificationParams({ - vkeyHash: VKEY_HASH, - proof: proof, - publicInputs: publicInputs, - committedInputs: committedInputs, - committedInputCounts: committedInputCounts, - validityPeriodInSeconds: 7 days, - domain: "zkpassport.id", - scope: "bigproof", - devMode: true + proofVerificationData: ProofVerificationData({vkeyHash: VKEY_HASH, proof: proof, publicInputs: publicInputs}), + commitments: Commitments({committedInputs: committedInputs, committedInputCounts: committedInputCounts}), + serviceConfig: ServiceConfig({ + validityPeriodInSeconds: 7 days, + domain: "zkpassport.id", + scope: "bigproof", + devMode: true + }) }); } diff --git a/l1-contracts/test/staking_asset_handler/zkpassport/fixtures/valid_committed_inputs.hex b/l1-contracts/test/staking_asset_handler/zkpassport/fixtures/valid_committed_inputs.hex index 76528377bb8f..26a515b948a8 100644 --- a/l1-contracts/test/staking_asset_handler/zkpassport/fixtures/valid_committed_inputs.hex +++ b/l1-contracts/test/staking_asset_handler/zkpassport/fixtures/valid_committed_inputs.hex @@ -1 +1 @@ 
-0801001404fb06e8bf44ec60b6a99d2f98551172b2f2ded80200027a690000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009099699583ea7729a4a05821667645e927b74feb4e6e5382c6e4370e35ed2b23c0543554249524e50524b554b5200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000068d6b1141200 \ No newline at end of file +0801001404fb06e8bf44ec60b6a99d2f98551172b2f2ded80200027a690000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009099699583ea7729a4a05821667645e927b74feb4e6e5382c6e4370e35ed2b23c0543554249524e50524b554b520000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000068f54aab12000a2532418a107c5306fa8308c22255792cf77e4a290cbce8a840a642a3e591340b011fa73686cf510f8f85757b0602de0dd72a13e68ae2092462be8b72662e7f179b000000000000000000000000000000000000000000000000000000000000000002 \ No newline at end of file diff --git a/l1-contracts/test/staking_asset_handler/zkpassport/fixtures/valid_proof.hex b/l1-contracts/test/staking_asset_handler/zkpassport/fixtures/valid_proof.hex index 67dca7321f38..62fe8380cfb8 100644 --- a/l1-contracts/test/staking_asset_handler/zkpassport/fixtures/valid_proof.hex +++ b/l1-contracts/test/staking_asset_handler/zkpassport/fixtures/valid_proof.hex @@ -1 +1 @@ 
-000000000000000000000000000000000000000000000004baffc8bff2e857250000000000000000000000000000000000000000000000029210fd8e388a64b1000000000000000000000000000000000000000000000008b9be81eda20d016300000000000000000000000000000000000000000000000000024d62100bf51e000000000000000000000000000000000000000000000001a4706f40be23e0320000000000000000000000000000000000000000000000031efde95da2ffaad50000000000000000000000000000000000000000000000023cc0c48bb2cf7e230000000000000000000000000000000000000000000000000002b56d42856cf500000000000000000000000000000000000000000000000b942fef2803a9c862000000000000000000000000000000000000000000000007a55fa04adae820db000000000000000000000000000000000000000000000006618664a2bd2ba2f600000000000000000000000000000000000000000000000000027d7ce166c49900000000000000000000000000000000000000000000000a7b5092a3d3c7c57100000000000000000000000000000000000000000000000aa075bad37f23fa2c000000000000000000000000000000000000000000000001a918745d35ed9719000000000000000000000000000000000000000000000000000160bd4a094ee10000000000000000000000000000008682c718fec37ddce1eb3c0b3b28a3134a0000000000000000000000000000000000219a3bb93458c50ecc425facbd9b6200000000000000000000000000000067d9dc3d1cff85ad46d96b423f42fbe5b3000000000000000000000000000000000011c4af3f50f3210f62d0a0bb51bef900000000000000000000000000000091e569ac8120760e238443a0c72f648e2f000000000000000000000000000000000011d33dd5a016465a99de6ccb3acc97000000000000000000000000000000866bdc6d1d739827b4ca5a924a85cfc650000000000000000000000000000000000025a38dee180ed4ca07f0878008a2500000000000000000000000000000003e3b33d7309e01921df043f35a62c1706300000000000000000000000000000000000ce24d8af04f22137664b154223da10000000000000000000000000000004acbee1c9b120ab688bfc03bfb0a95283700000000000000000000000000000000000d2da04a26a65a9bd5070fba2e493e0000000000000000000000000000002a171861b26b214d20a7cb4d428a270b19000000000000000000000000000000000006d220e2a01128ecd1358cfd02bb10000000000000000000000000000000e45bf2a5a87cf3f64798f8279a77be8f0b000000000000000
00000000000000000001fc24780eee3fa3aedc08349ec0a490000000000000000000000000000002a171861b26b214d20a7cb4d428a270b19000000000000000000000000000000000006d220e2a01128ecd1358cfd02bb10000000000000000000000000000000e45bf2a5a87cf3f64798f8279a77be8f0b00000000000000000000000000000000001fc24780eee3fa3aedc08349ec0a4900000000000000000000000000000082dd33fc23eebd4131dd10951ff2a5123d000000000000000000000000000000000026364c11f4144bc12dbb4094a27dad000000000000000000000000000000b17430b9223cdc419357db857aa2e3c445000000000000000000000000000000000018fb79a1699eebd729b706c44eabc9000000000000000000000000000000a5272878f105c85d0493696942a1469fa500000000000000000000000000000000000831bc029c617a602f0d8dd8d7f662000000000000000000000000000000434c0b18f1a72903a244914e5af317ebd6000000000000000000000000000000000013846a1f7ee803f2ad48a471469398000000000000000000000000000000058659b9be6c0ba14ac9ba8942de6e593f0000000000000000000000000000000000063035ac92893ff734d5978132c9e80000000000000000000000000000004ab8a6ef5008a0a0bd82284d484c74220800000000000000000000000000000000001050149967dadc2d82a47ae9abee0f0748e7ad5727d323903dfdc1c3a1beaecb424ca3c657f84722c4ba66495a79c8291b66c58a09cd06281247f4bddf99ae5cf19ba4b361784a211d3b2da6a586392c495787de878210cd8326a42b68c84805ae202f4628c25a924acb458b9665d414d27f68f5d8b87bdb81092eb69e69db8027701aa88228cbe18cf5abd6ce4bc3089cd305a353e21b5f5796c2d2fdc3026923bd79f7a2179c379559df36e94c9f048f828e1f169213feb27771f1372cc2d5ecdb3e1b3db43f5e126732473b51fc065fb7deba03072d2bfed3e9cc6ccfb9c37180dc9c71642aa08e7d59cd0084c107795b22df0ccbdfc4b032462a94d7aa3eba8e2b6b20e4aec2fb6d397f6b83f22c1a8f01498032920ffb5e6c0d05646cf3d96d0b1696c575702e42257840e8912f8c5c35cc2d2f712f3494984a33a150cd6d2fb9f29968ca8d7cf5bf1a14b3f91f5731d19fa679d4c1b92c4dea27ea08aabf307ec184838300c568134e0bd89e230179044cfd9ba3626dfaa375e84773ee173843bc24383bf4b54d63d68149f906d22abbc0424f7416c70b7a0c8baa160390812cd640db3d01602433d90e136b0466e7ec3e6077a24d1cda05f99f91f7c9e88b5170cee4c8ce6c91f95b59435b129479c6980ff9d1ba6ff6d0e5835d2
3c0ff4db65b960782639d9c9614f00606236c14a9ffd8643994b002153a1434db6b453fe0af6495b2ce38a51f8e4c264d2ffcfeb45f97fae54de5807043a94b9cf9844c5fea9a8ba416c1231af3c8a55126a99abdc639a4f35a36efd149ea754c3d6c2caf6f4b1e118ef3f74acbaa0ad30d954600669fec2586329721ea64b6d150e2d2a1988c5607a136b0daa1460c920fe75297cda41eed3ec60c311200536d369355f8f0922141f33f98ad3460ef9e2cef383717635befab0b81eff93bfc19eded4eb2553a6a99cd430be10b7a9da927b72bdb014f065aed3c62dd43cc29b4a94e2d12ab6ea33165977ea8d8f8e5231be1e37d1ad045822a74edb1c8a8d22b6e6909a16c21b6bb035dda2c5ff3904f0294f011280b440b8011f46d46ea00d1f29892c09d8440ed4efb43ce0c36c20d02386a159eb089d518ad7594072113f874a7302c2999027c7c2bd5debed78da408c5b95c979b4147069f17c513669542ec35c27eaf011f19f3e45bb0ef2122750be3dc7c3732d1d0c769deaacc73d366050ca7f1a58cb785b0ed44925d8ddcb6066f6a3da57f18239b2668bdf763b6e3d02e4a3be688809e49db975bed77104a2815ffdf55214b6619421d82421bb70e84462c9ff0aa6681752c22547dada1022cf5158a804457f47074ce14dcb18ae94aa96427e237a57c06877a6244c5ea031f791d7f3524731008ac5ae1726390d021438590a1a93c6550752a567f65463d0927fbb1466284d0e99b20d3220dd5d755d82cf9b67d035fc38b42b22229af652a8777a2453455c819f4e4006b71ff5944d83c0a8e412b0027c031b5138546131e22d21615350313cedae8c7e4782fbe55ddc2a7928f8d64cde3eca738f3158f0e920c404c4d2bb1d8950b1516d532092d37a520372093004b824811e891ad9b0ce84e7960e39963f7b8790ac5516f6bbc9d968c82dc7ef56c79bd86c3e48257212f595f634a885f6200daf64259bcf7a5b9d1eb83e57ee316df55d7ad70590b00d9139b0774d61f97924f59501617f933a3e881c8616cb140211150c039044e129bd0bcf5572f9857dcf1e716f5c3251089211fa9397f0d55f2d3c2ff95c84526350f78359249155916d5a4ead031f153bfa3a9cfc73664e2e3afc803001d222a8107cd31decfbd214884d4add0ab30f495b5ba6128b2705153f5688aa119ff096f9d767344def3ca8fe9ed5dfdcc97c0ae6c4d08f7376f62342b12d3d03ba608bf3ef2d53997e53fb0d0507871c07e6ed4557a52d7ffd6f403ed0ee4bf101d105951ecf03634c2e396a3f1c9d3cc7631f95ba5bab59e6c88b2c9b70852926b23364cfd6fa8ebd19740bfbd53db9bd706f3265facb58f6ad3cb971369bc459f2d2e818ff691828cae3bc67b9fa0af564b7ff9b4798664f
ff850bb4cd72a4fbb1ad36958f3a3403dba45cc5eea5c18b221a2a1a380175eeb5574edfbd7b2771e1f6ec2b1776ddf81bbb289fe30faee6f0cc6b12301074a0f3b3ab4a6f5552af12fb2a114976ae7de7e80f43dbfb14a411a1a18d7e94e48fee9ee13affb9e460f2a7e7ee561f40b1ebd35c83ce699d25ee44857a9875222e37b79a875e711c0362a99b60dd3619cace8abbfce29d7ca167d8365e1e49786970f5f75e91a81ceb926aada6e0601f0c2cfaa0219fa3cf5cb88dc1b20effa69de9a7051c0a09d798a01171ebc301e82c325132767f9ece36d3851db54d3065d27c950834fecf166092ec13c218efa87579c027ea5627df0278cf1dbe33a408c2dc7cf3a4fa20d1872165486f24788a6eb604d7f80a61216f28acd8e40ae4fdad3e2f9d2b0f0a0cfc41ae5b43b1f54574b2eedaa707c0e2e8ca7e933cd5f14205ad05607b001e839f01b674b0564464eb240af55d32af5086193b816ae2255acfba5f21153096e46d72594ffe77c33881a1ef3198edbdd51cf3d95b277ef97218ad7d84dbb2db0efdb163ae0511aed790d2d469ea65bc873c3cbeefb986b57c3a07d523304f2b8dd1c049e30c6a49b4328e42a7813c9932b9178bb5f3f30337df2e483ba6f2e2fef0c2dc2ad78b33232fb44d922785edcb4605c8d11cbc06369097af38a509f42358a302d7960fd05500bcb135a3b7657c88a24e6611247ec9ae6aa94e386f870bd0d2aad468557e7405729b05baf41ede2f3e1077c388c5393ed38cabf9d027b5ed907f7fc52c1625178b7b2c4fe9ab40e27c246ad994ea29ab0ad8c5ee60412b4552e3d8167bd3242488c1a8d7b439e838e93ada51795165ee78784c22c1917709a216ea85842e4dcf25f52be73092157440dc1f3e551cbc026dfe08381fb299a7828d14fcb0146dfc820085183bc3fcce2a810584c78d07345f4d8e2d60a7190270451b03618b6865276ae18a895977f91fa44bceee3445723a39183eedba11ef12e5a23f7af513af2f2f936ed42a60a6902db1f9f517dec20d8ae2e5eb540e0082cccb3cc9c15794e87464c9f6380013a2316b935ffd54a7d95d7d40990a9445d2b4f3dc5062c1301da6d71ee3a73c4428d3173a48c01068a94e68d111581af280e22b775067abb0476cde747b9ecf972d91cd4d56640416c2d61fb1d9ef14b8501e36ce06f20f7997f8763545127dd8ea33d82e91e87b35c2eb818d366b9180925bad710052a538cbd822d0b01b7cb5d1cf723d5cb34e52fe98f451014c643b1131f5efc221d11e0a13e6451d3383a9f4ea25fb8349c2720502e59bf7bd519d3146904e1d3b6ce6cf0261b651c87c1a26c66bf1403de6c71fa7d332f0ca968d41c65e8014fc74dd4ee435ae4ec3c98afa192034413dfbe7585d25abd7b162dd
505a9782385d8a839f086ab3decdaf14a85c6701d4b770bf9629bced0365410e316c4cb8713e8624f5b1fc2f3abfa73be676444f1895dad68f4236b74a70fbe2d075db884aaf204e8963a4d1b7d962f15a213160388fad5621eb5c3495c8e3e522ce7fc7b0dbc184751a067ceea7e22bc76182f7a17e09acd485b51a04aa25bdb01e830693dec28d58994ef192d5b2ec523da5cfae335b29d27bc89e8e554dd3c18a7c29726c37fe0650884c14de8b9182f4f25baaf313aa83285f52dcb61bdf01a41f04a9d9dbf47138e6462383e194065a1ee7300f5ede25857fa5787850d322be9772a8fa1cea4f35afd47c782af7aa541676c2e0054af238ab39585559c530fd75bb0cca8b2b71a81b1fd4c929e0b32e17ebed8d96eabc0086a8846f5fa040aca3ea524a10717ae4a20fd27220a614cafcdcc5f4fc77506c91df2491330b824bf12c11c7a43044028fe02c038383f7f00d0f25d672f0f0b05230c137251bc15df5f637a8bb7fe2de86e8ed4595168b9dde62837511af389680605225b20791361c5561ebdddd2c6c84d23cfa59a0add26dc34b415585b5025e89f0ddf86ba03ff703699affa5ae3906ce0fc4fd53336ba69c5dadf1e98020823e2351763fb2bf96e66fb65924291cb6b67d7e7b003f6868546cb3a7879afd5274d944db16a2458531a724c7a0a0ebab57ce939c8c7c9d1ba6321474af894b34b5f32d4624d134e6d47dfbeaff3b4ede4e60624404cb05e45795d45fe847da7881a2ba62d822786d145bb5c5ba98f64aaba8c655131c7516670cb4e3548e047ba76fd73566e21fc66ccdaeda7e542f0a5df61e6f7d3c2fcbe04c42ee88f07638826d5f710b80ea221bd16b9e310d4ced058a2ba60f46842f9d96d8f8facf4ddbca724739fe500fbaa5eb24edfdafc46e4da1fbbbf67bdc2bc1f55cd5b4a2f2620564f9b65a72829d8d3e1de6559e4df9858504a67e34480fdcf35275dc33f0a6f72422f772327b0208beedb3483c9f96506b9459693447def1ac3a06fa91d240784bed1b1ca06979712dff52478e6bc3f5eaaa9f9da01fb75b4c625b2b6c3dae73a26ab03cc04fbedde6e4ccd7a9de39492c3c528915ca7ffb1aa85897e0e6c8def6f733bd413bf819870ca819d8c7812d145c669579993f722c616b494f6f1a9ca988936812da36973fddadca4760baa0e50b71b691939096c66eb05b4280517f4f50f12b706ce4163efce67cc7aef0b9ae2d4764ba35eab124fa5bafa7c71e09493c918c3143f32549b98267640e9740c4979fcd68d556152ed33bf61e9f531635125459f14db224cf0bfb928170f1962840268470811b13ea9eae964878a02f9331497e605813012bccdbc2f6243680d4a2d883b0fc320f372fca62a8fd11918ffcdb3fe10279e28c38c03d
4eec81c0aa03ef0af8768fdf22d26c7edd40b4881dda430b5278927ee64d55a3893db74d32777da570348e25c5b83757101b26d8b5779a45403166f0d2d4233ff7decd0caa8358d71eeed3f35747521115604db26c149217122a2df1ef7fae9f9ab702da3b9866b41b987cc1cd32a0a0bf80a79b4e644b05909d8276d8632d3d65aa72ca2aebf27f686aa7b37f39277c01a85b30decb3149127c6b3f58b247442770b07063b4484361fb7aee7aa01141e15bc967c24658649178ec5ce131c35eda0a569c1cb48a8a90f30716f758a0dae18e34854e12078372172a7f8da98af3439f83146c37066d4d1ae19d59aebe84fda8984a7ce65713614716a459899e4a0c595a6680987e58cbd9a465993d53952f78c24e964cf7805001e3ae8280351c38cfb3f6582e35d2f4271de27b1909aaeba6141c6336b8b1b2b4d0d299f0a9805b4e44f074b5ebe2d9c4b6414dbe3f5be71763a6197c924e72fcbdcf796c15d4e06f36ee8bac5c6d27f4e34d7c7f9e5f48f4c244bfd0c25ce13a3a9fc79b47134a7e3c8e9e105d1aeada835f56411bf8c30c289b69492540d0739bed0a5f40fba2026a75300c286f51a4f0874bd0050f7d62ac64f4a3efcdd06193101b1609a5cacc6dcf564e160c87ef2d025f15ee9514a48162fe7238dc2013deea48f0ff06578f997fb24cf01235c81ab667fe403bf82f864d4249183911c15010ae391d24904863d3112651a6abcffbbd8ec92c649bb9aeed5f8b2ef1627abdf9c65b08adf7154d3bcf8d4e0afb6248efdd53b086dad3e44cb7a2293932a92e0022cdf15268b7ccf2f8ab89de546892232aa82a65664d44a3f9c028f0321aed061b76beda802e8731205fb174b57cf0adc9c1667507108e6dec90230252cc8c4279deab733f1a2747458663c618914f1cc66c3e1cbbb454d7f594c85df162d8848fa5a997d8e8e1e451801185a02ddae50f1c638f891072a90efb5bdce1864674bcbe73c7d66f9077d23fc496eaac56a3632195a3f2275e5040782350f0b4ec334280b9bae4e43d1f0474b4b048f94654d8e91ec245f7b5ba1408bf5910abe3258a3cd51d482101f8d176ad0ec7ed68daedd4baa4e44dc9856c6b14ae500f8fade96b519d5f7746d504a1560baa9029e3356bb95f607c9b6a2e685ade018d7abb7dd794608649e85f9dca3d33ff7faf01688dcebee7155d8bf806978082d8ad316824dcfb3e6198dbd6b119ef535f8dc02a91a9236e761c9670704434e20e9991c52952bdd75a1f00534d9991ccda2051693917fa4d4874cccdf2242442ca678d01500f6c42c5b258de905abeba79918b7a3fee87a7a8c4221a453e0971c6f166b94d9b838a4c532997c8a12df5e9fa2d378122aa95c3de15056de1d3f234cb4c32d99559fca385c8bc71c439
d322bb2afb7e764583cbfacd6f2fcd5a201dcaafa3ff8bebb4b0298d547c6ed322706a196addc30b3185b1c77ae1fd4ca27a850ad46c5be1773793e23732f7f40a5b93d469b9e3a2f2d7fe67f50a0a6d91a2c7d364214848a0ffafdcdaec94c0156c4c2943bc11213a9cc0f083731821325338d35221ac3677716dd303e6aaf4bff3c80ddb165113aab2786c3470fb85e2836204a9c0fb595110066fa0f30ba19106b6f524743e6e5748f3d011134ee2e1b6b69c650d5f5e179c18bab7f522a8365f5a3d98fb4108266761b824ec11acd2dbc098ee135c35047006013e8fc61e263322df18a0532b71f0ea4689df03ce011fefee4e241dd69a26c26c1aba07e54273324c0305888234b221a2c2072638104d7de2a9af6cf4b37c82b74e47d959db5cf9ee910d2eb4f2cff489bdd9387bc0bd648f54e53a504327eaa01e66519701b7ec96e0065d3a12623ed23247709be107abca1bf4f0a2ef024694e10f6408c16345ad0f3c4084b15e0c4aa468c7a811a35292392395c29de9405056f0ac95264bb7785693d14ee93599f746402b321163374c43f153947bb48257aeb3709bb3fcb5a8e23682353b985a1796db54a3a071282fd88bb45820c1c60e184b5c29e823350d07493ecc067507f8229f72df405998458772057ba4fc13391f58029dc2a04ac2e3bde56b7c7332197e4a93ddf0c90827f505b2d8000bbd388638d8a1870aee5c5088379205d628965070816d32f200f7baa960cb33e921c21774f5638b2ceddf5ab9e2e02afaf4b29e7d7669300755579c77819a3a044ba6d4d00ed7deaef812cd4c37ddc88cbdf32fec9d17804da9ee40392d89961902d43cca1bdb96f8cd22f2b8396f5bc58c1d0d357548a18092112f7dd8a50ff315bb32d89517d53b99df5671cbaf163cb66652130b856234f5186cf4922fc3112d59722c3a13ad89c0c96d2b282cd94a79ec5ee0061ea195471f19dabff09d2beb5c22ad0cfca2da6cff537a671ab9400371ee0d9bf63042cb465fb48925a77f7bc0ba497e3d3997f3f2bdb09b6e7a961427342eb4c532aae2fb8664996bccb85437c1ea09e68c9f56433742d942062b454295f17f4731714799d8f506dde7aabbbcaaccecfd69119e54eb9b1b6a1d1ab6299474cf66d2c87323455c095a49e87d1ef15c44ccb5ad526ef41c9290b1b3290d4cc2cd41a07f7a99e7c796f3374ab1d88d38c0eb9b417dfde42c368a7195dcfa11d95e8d5108998b653cd6f744bb89d305dd42fc1cc13056ff944ec95446700aa093cb5bb22f5e2694cf48a5d879a7c54a5b30176353878b6958e31f20d2295ef71bda7830f1be5dc17a7b2cb857c40a3cf98dfdb75f19774751b975a4082eeb6f0663b6f074017d6e7e17fa7849551831d684d5b915f8ae1f58bf35
df3d2faa6c297c4e908e0f870e61608381b6e01124ce26e41d051d51380d0535840c5425d7e4f9e201365db0f65674ef8c9a7b31eb31ca4aa56f399673f39827481d3f42b4faef28d1acf64a157fdcd43a52a7ba3cf85716ece3144e97719deb5a4c9ca33da5ed91a1e26c73317a2b5859a66568f7cc68eaba2853bc5a5c3e46a53fb4c6b64ba4d892339fe424690941c2e018c27e8541b5e16fd4047e743dcfb813b97ca249e6b3121b413a6bd7709c69f56e0dbe72c7fa9da2bfec36f5b78c650c5550df177aa3b04ed13022dcf2cb5b4471a8957c0599b64d5508b36fcceb7b7ef5059af5e7bb80847eba78bd5935e638b43920c9677618ebc9fb4eba6742a030f084602dbc86820758923f81411a74baff16859149f4b50c9646f55fa7d59e4ee6ee7eda53d690d38a125f95e9df25ed8505e5f9a63b5bc9ca878ffc5cda8fbc6fd28b545c18b2845e9362e3eca38acb031ee44c76eed8e30a54c7060c3211e662c35eea3706526fb90ca55e56577fd84106a73ae8b72dbcb528b94b5bc8dae0c6675fba18c062d68721267e070b78022634c2eec3dc9745fd1f59e49e1bbb6ae30b6da9b591d00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000021740d511ae3ddff122c265e84ef1f620c6c56b6cf1842b1d032142a611b3e8923a2a8999d245b28ee74728924f4fa7dc59763643f33346c5167171d78776455043b59c38c3c237ee826803422e04a2c6d0414af934b25ea1e77bc5ca8f2c6972e9844990550298856b82b53976c981ced71731a5cc783f09eddeda7b8fa70720572201c3f526f2dcb6d24d87db7fcbdf090d1f52a20a58ee5130bff1322951c25a3f9c21aff3ba294ba59c53ec198d78c9ec6498b1656c4c9317af5508422fd2f45d220188d2bf06060950bbdde63c092742fca0398bd662b151966b5bdd137275e840be7054035628fc7c674730b4701ca2cea4b56e5140e7f524a4675e6a228ee3c2bfaebca760b965eaf50bd1e67b019f1b4048643dd857841b8e4c9336f118a4b3783eb5fa
7e2571a8504d6a60940ab03040d4903d25916b22bfe35822a22bd0281e642ce39ef625327331fb92ea8aafdd756b43eab3b9d5094b569edae227a5437bb6c6c24eb5a448184f84e761f257028ebbe31df1d7c251ddd2a594818fa49dac2d8703d7624f055ed00828e93f7dfb0f7c43d40a798c655704de47e28e92c5366d6ca84551daed4f603e2a9c3a26b6ed308977b797e5f6809e618880da2542710aa60641219d0b816b6d78affcab0d83be70a7effae7230be76dc451a32d30d0e2e39a313bb639534184b81ee211b6f166120e933079520ffba3bb906632321172d776bb60ab5e356bc406dac4f4ac6a310c991da1e255e8a93e9290f4842b7f9e0af1db64b1b1b3aa10db58539266b234bd83b4828a6a026b583ca047fa6bed5b067ab7a4e08c6d97b2f9e4747ba872c534cea7d715f7adf9321e911ed99f2dc1d0bb22bb9da032743b73f7554ef556e86a060df0c2f554955e717236f37d9d93897fc06b735883d5a693b5a48cd4ea35ec8d3d5d5c14f5365928809ce3042f3b9f3a7ef6bf748a76bab35ad8b663c2168a749a879ae0930e1896424f80f6a6db32789ec0daa681484c93928ff5828f1ef1688eeee81bed3fe98d50df092c86f4a8efe529bf1188cf50fee7770ffed96b9e9b3a7b6900cb3cff3580a08126716f3cbd5c182dfbf50593475e9cf60773acc704166ae73231479455b040b29e1f8e8aceb849835203f065ed65749fc01237a29cac8039c142354019425f6c0603bda897b5343f8cd30e09dafbc4624075e72c75edf810567217b51060b72f7f5623c602515af072158c385c813ff1a0ad63b5d85dec076e5516e54951ba35a5d69fb4fe9b6411da1d0255f50cce0d8dfbffc1bb7eb7c630659d73f6b217514362cf2ee62b3ba5a87ca30455bdb658ba857df04d49085ec4730aa751c0419e157ba309bf2610e29caf4207d3c875bd4090f421e01a44c5d32b0a8dd4228d26a38db2ebc31a859af7d287fdfc94a6965ecc555d6dac92e4aa1f042fbff02f30604996078f1dac80a697840e1611fa583f6927d8df8006b80e98b32da1a2d04b70dd6f63bd62ea0a5534500c8f4b3af7cb0a9b47b1a4e1de8188e3d3b442f7a08e07094f10d696bd0f3fec56b1d5dd0c03578ac30310404c5ca08a5b42c2f7a08e07094f10d696bd0f3fec56b1d5dd0c03578ac30310404c5ca08a5b42c1ac8c02bb96a0a80f2900894c9f348f2e5152027de751656a059178680030ee01c30a926ca0aeb0eaf71a09828bc3275612e2dcbbd3c6e25067d58b7cb1d1da30571331eb7d6227d38d9edc67dd79020b91ae3dc4ce8238ca71ce42f58f792801490e2e075ad56e7f183c7f0a4397010e73dcce8c4aea42d43ae4836957387a909f41f207bedd3ba36195b41bb87d5c
9a0a056bd522f36630c6ca7b30d0629d6000000000000000000000000000000a771d40b225f73eaf52e3cf669f878d298000000000000000000000000000000000000c859a09108db99847e0a4db609b1000000000000000000000000000000df322d27c91d474a38282c04e432765db700000000000000000000000000000000000422dd2c4755a37c5402c5f9a554c80000000000000000000000000000000c21fc2b54972092f0929c914ffd9cc830000000000000000000000000000000000018c954bbc8625a9b483219418398da000000000000000000000000000000e68eeeea753e286c91e05be70b8ae8ea27000000000000000000000000000000000026df9ad2ed12d70f394c26b72d7075000000000000000000000000000000e67cfc8befc4dfa27b2dfa148568ff3cf8000000000000000000000000000000000009fbb487e7c7d0ad857cd3e07bc73c0000000000000000000000000000000801addc979d2ebe1785a2f2dbd3b8c38d00000000000000000000000000000000000140f776ea244d14d9970857dd3035000000000000000000000000000000c9a80765251d29c1072aff82cdede6903000000000000000000000000000000000001ac71cff5db99be506ba86291c479100000000000000000000000000000090ccd3f19037ea1aac136cb9eea67a96c500000000000000000000000000000000001023814dbe44b9905f3bdc078b0d9c000000000000000000000000000000a1a29c9cd93d840d3457babba62bb63d170000000000000000000000000000000000200dc2bd40238303733c472d38f0c80000000000000000000000000000003904b80fd56f4daacf438523da89fd64cc0000000000000000000000000000000000159c9d0a3b1f80ca181a5604e948c2000000000000000000000000000000ec7c166e0dd2abb079cb598cd996bf344d00000000000000000000000000000000002fc1219e36494b76b462cf05d8f9450000000000000000000000000000000015813b73055c9bb33ff2ce19717105880000000000000000000000000000000000055853a1082a237dc39f44c14d5e39000000000000000000000000000000aa713dd2270d03e42025fad4e662233977000000000000000000000000000000000023e9ff50315408f16a797f3ccaaee000000000000000000000000000000033002a30e6f0cc5a5ef644d14174a3e8b6000000000000000000000000000000000015ff78cb3cd37be266468d354c66de0000000000000000000000000000002626852edacdf5223a3e63d8ff46792d640000000000000000000000000000000000171827c5006f94beb1e5860f8394b800000000000000000000000000000013d0f061a439cf63b
8beb0f8a99eaf0004000000000000000000000000000000000019225d2024ba05408559ae6b61b12d000000000000000000000000000000614a63882cf1fcc04a827fb3499ab8c04e00000000000000000000000000000000002d4d6c17000850a9aa4352822d98d90000000000000000000000000000006a61689f3e5d44dc85450e761b1f12d39c00000000000000000000000000000000001964f4b321fb7aa6ed58802a0db95a000000000000000000000000000000a587a5628db15394ad89c936eb655c78fa00000000000000000000000000000000001cc9f4228921f6700d829f1e8370cb00000000000000000000000000000016e045c72259abd54a8384241e32c07c1900000000000000000000000000000000002caed52183d367c0ae32d6fb1e47f9000000000000000000000000000000b775054852d003452e71e7ca2b12c4f73f00000000000000000000000000000000001f084a905ed0a30570047fe893761a000000000000000000000000000000800cc6a2ec05fb1272a2382636afa30cbd000000000000000000000000000000000001bf304c6bc90a141bcf0270d518330000000000000000000000000000004df73b4634333ec3b6cf841eb24d70986d000000000000000000000000000000000010b6e002171999c9a5c83541e9717100000000000000000000000000000054ff16719cb9d9743417e1311a9e57450200000000000000000000000000000000001b87ca285121d8b6aeca9d4ef4f03000000000000000000000000000000000473ef1abd3bea5f4b90974c36397cb44000000000000000000000000000000000019e4924398db0b97ded67ecd000fc700000000000000000000000000000059ba555983f79d36c58bb2b1d4120630a800000000000000000000000000000000000747e0b89470b9ce20921d10134ef4000000000000000000000000000000f3ed803767f16e3ebcc12b42ba7d5f3db500000000000000000000000000000000000269bb9a52e57cbbd1baa7e2a368f0000000000000000000000000000000bb1a16e7d9b6005ae735e884bd0afacb32000000000000000000000000000000000022b3bbef6d7979cc30b02863a53e1d000000000000000000000000000000edb4c75841a85946699cf8c1f25673e318000000000000000000000000000000000029e304b2edbc111ca753d2bc91786600000000000000000000000000000002a70bd1946dca177312f8dc53697da01a00000000000000000000000000000000002fa4e53192553f8b1bc80b34768e690000000000000000000000000000003562c709c0a06ced1d02ae34448b54cabf000000000000000000000000000000000001e492c3d8cff2bfe7e3de580e9b2
600000000000000000000000000000042854dca5d7de42672616616c490947aac00000000000000000000000000000000001e449378a86e6230fd7569383bf09a00000000000000000000000000000059b60c33944d6305b9e24ffe6c3eb1c7db00000000000000000000000000000000000418ae58cd904793d56fd7a274060300000000000000000000000000000086c6fe53b74ab114aae1118dc1ca9651780000000000000000000000000000000000231657c023fca6d4ec0198c03bfe5c00000000000000000000000000000012f1df50f6d8b3512f0a80c40705adf9a300000000000000000000000000000000002d1b4eb5deb6de3afd9aa02e4df5b90000000000000000000000000000002dab9ea8d6d45274347a6edd63cca0e82d00000000000000000000000000000000002ac2433e8757d93c6b5b5dcb1bb54f0000000000000000000000000000000e0ae773e1e2d2513ee6a90dcb2a4f2f5d00000000000000000000000000000000002d8736bf3bf80599ae56f0d9779a500000000000000000000000000000006ae004e684644a716dd897a55adccb25cf00000000000000000000000000000000002ab31035ac8a127330d2af3235f504000000000000000000000000000000d08399d372608e54c699eb1de1c225603f00000000000000000000000000000000001cefc1a7f8fa968e69c9d9b94df8c40000000000000000000000000000002d786a4d3f6165463a83db7d3e094d5f620000000000000000000000000000000000129658dfa03cf1a9b30246c2f1805d0000000000000000000000000000002e6dd4cc2cf6dd4568cb4f1a181e099efc000000000000000000000000000000000021d49c2d49e305812466ffb9f2bf1100000000000000000000000000000088c640f3c8d92aae456bc93823dfd9f69c0000000000000000000000000000000000057fddfa2ccf227e5db633cc46560300000000000000000000000000000052ddc8f869e8c8f6c4f223f1d77626297d00000000000000000000000000000000000c0e334573eef0147c196d78956adf000000000000000000000000000000d18fb7538c4716112b27203df02be0d256000000000000000000000000000000000019df26b9f2d993a43cc868803f92a500000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000bb7bc5ab26869a3e2f37d825b106829ea060f76b9564bb4e37e5b6ffec3621c0e4fb10cd37f848ab625c98e985598ec3cd49d7d6af6476d4ad8d79ea70b20510260bd496dba62076ee58e3e61836a492b21bc3a6e78cbcbf35582ccd4750c7a149bb511974eb4a60ca4ee1ad0ff49d37a98ca492b8aee8eb07c435de6379377088776d42fd0538430805ba5ff32da4077199e9cb007309b96f28fbe389667d10caa0e14b6ff229dfc81126633dd9abe6d0e99302dc52aaec345c7308d2b2224115a36704be3824ca94b606a7232e4fde6edc5489fbc19b348dd1e8202bb132808026b88b20ef24d7afdb0990debe156ea83a8fa18d8e67e517d7a8384649de51d98a269269cc968aa8f3df0ae5e2bc55edd85ef79bd83d2c992453f14655f451cce691aa35c64bdc62c1c3b8213b944f9b331fb8784aa6f6ecbe674692142e82c0d829d45dae53114457ffecbb6d69e6213e0d220432fbf72a2868622020a9c0f2d4dca898c1520fe7dfb9e91043e69fd51fff8338c27ea59d57880a8222e2318f4734935fdf117ac23157522e3024c1b7a335c6daf323f700b7aca07210e5f2224b0b223ba6c22258a785fac28180e63ff3893ddf8fefd2694ca20e0b999800ac46a831c6927c7e870324e5733e56ab5d801f776eef876e2124b7098ddf36c1b23ef201c087446a0de883c56da502421780bbd3fefb64d742700178283ab902014ab792681faba936870d833d451e
9ddf71585329fd3143337e02b57910d6b0708d5f06f0a81541ca39b75200604a1c72d6db8e981ce939fe7c6cd42d62c521c2655236c8a12b96a97f32bfc424abd2714af4a954b86f566c3c69b4b5d75521efa9222ea3ea20743c1fd352b7780b66d98b5413f70874f14c41d05b774c90215ab331461158c9a1e9af01f12d39b3e267c58ae84f015f233e66a401cce3337010eabb8f0f6fb61dc3f5f58926d69aa7917198e1c619bff0474c5f723b679ec097b202025132bbb828f288f7b26d161eeff3b44cea895a6e450a361016b87bb00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c2d42e6eafc753f625186a17089a54e5d200000000000000000000000000000000001d5f8e7f78892494c199c72e6306780000000000000000000000000000002b8dc9daac30c911e3bfdf1dc31e73bcc10000000000000000000000000000000000006dc962d078908123770ed4d18c4700000000000000000000000000000018e6b0e7d1aec5c31eb0a770991bcfbe4e000000000000000000000000000000000012ddb1677492e48178bf617146c22800000000000000000000000000000067abe223765fbb6c252e22dff202a83c6d000000000000000000000000000000000028eca648cab9846d972ae8d2fd8197 \ No newline at end of file 
+00000000000000000000000000000000000000000000000a6709c841157111b000000000000000000000000000000000000000000000000a3e7673ae0de1b59d000000000000000000000000000000000000000000000001ec2128f5f2b79ddc0000000000000000000000000000000000000000000000000001bc590980bdbf00000000000000000000000000000000000000000000000069e74ff8e002039800000000000000000000000000000000000000000000000573945775c1b354e300000000000000000000000000000000000000000000000a4e3c66bf7b4ee6190000000000000000000000000000000000000000000000000002a30ad74a73010000000000000000000000000000000000000000000000042a27dcac60ab250a0000000000000000000000000000000000000000000000061e1db41ee941384a00000000000000000000000000000000000000000000000cc6f5c102237b4a00000000000000000000000000000000000000000000000000000260bde67d4f5500000000000000000000000000000000000000000000000e5e511bf2cd61ab2800000000000000000000000000000000000000000000000b54fbf04f587992cb00000000000000000000000000000000000000000000000395efe1edd939e5cf000000000000000000000000000000000000000000000000000278b6c5c20445069c8f7ff48d294a0247ad20fd5a5ff5f1845d4fb8633270bc34179a491bbdcc00f32f365af85b41cc47773a021a2824a9240c86fa0e113bf6f2d06155bca53504f8013c9bb40189084a7d6708f1403201184d7dc7b06970ce91158210f226330da8f8f3560c024e741786ec2d6fed9e0bffc089c02eae30b060bf1898ea78eb213dfc583003c852ba3c578a3850b6a00b85ca109f877b2adbbdd9d76679ae5814e0278ee9886d52b454add49e514f2767a87ca96806ad894a7d9a22311849b60efc36c622daef926c7ff188b2ce12962daf5b8a5e29aa2a04d3214b96e70d3c1533b983c01dc0e465c7ac0cd79b71a325ed0f8393e8252aa5608c5c1b873c720efc36c622daef926c7ff188b2ce12962daf5b8a5e29aa2a04d3214b96e70d3c1533b983c01dc0e465c7ac0cd79b71a325ed0f8393e8252aa5608c5c1b873c72282d59071976e6e51546f8e854f7e4eac289dace6f3c284f30ab078b04717da60e8a498c7e9f3a21f6fe8f53b12412ae8aae54b44e08bc7a6ffc65c63c1e691b2350db38e9696f0b0fdfd692f2bc4137f132413aa09934ac580e2ca9d5c7f48e1a15dc20a33c4c7e63ca9179389ab7338441881f2a59f6e2f4cbe52491da75f52ce03f64f3e1f0ffbe93a49e74c95680b4cfc11be933605f310ed060e4cdec8b19876e7f68ab167
8e0b4f2829fd81d21878812d4bcd32974ac32f069a9cbae4d288b486ec9f47919a7ecf40bdbe71a1a449f71bd6c287822a98f97f1bb68061107d90604173d2710106351aaa59a3e42e394768b0d90f86e9a525da23497f9f02c04d3649576702c1537392b023a7d9666244d13ab50202aa94256581412be0c226abb7da4a911ec1eb8816b1e3867efb5d7213780004ff589bb7a3abb7e04ee0788c6fedc9a1dd15cfcc21165189d9f458205bfb04feb2e12698773e9f99dae1fd7669715c65c59f68cab9230b1776d877700d8f9be36bceddaa3359cbc13fd2e6ab4f3852d5072309581558bee7853502aa3d278cf38a90489c912626560e329fa3957375116004538379a565e3edd7f161476219686ca4bf70dc76a91773115f4b1e35029a4b3989b88e22460f57a7af66131baf65d7bbee9fe870722595b13b96ae57eb15acc197844b294015c2e2f458be750b026df0a5aecf901f3f28119b3d8e9074101cdd1e7424d7cf4831c65e975910ae62bd81ebfcdd4b247717801547ce8d0bd96899b61e6090ce8269caa64e55aac81450c479a84fc7027358d2ba162f66ca8fcca6e9b539eabeeeca656840d2286bbd87ac5616573898c827f07f1263e1402871dba3c5eff98ab4eba9ac0eb88d2366f829210109aef22b71515e1ca788b5b0e64ee081ca6f14a8869ac647b324a949e068ed67862130594ef222be530863db8b6cb569e1a6a4111f562cc8ecc2d4d0db86aa2562e14470bf92c872f6f65d7c905a10de13bbef4e667c60e88767a3943a6ff381a06cd6285882ee29ee02a946a485e9987541fdd4606cd2df213d299c14f575ad23bbad30cfb1ebc1ea4142dacde8524e112d876b23508ef848dc6c6cb66ec5769e362615d7c0ac6618de99f45d7b60e2db488024a18ae4eb35688e19eee5f11ab2a1a0f4ea109812613bf20b5c56b124a72c13a5c0feabb42166da6805215865e956c81eb0a03f59428767b68694d30da2dfeb6a105965215dce9b2f38893d9bd9aa64639a22f430e75c51ebb216868be44a33b41fac9a584069a9b38481e9fd9acf1399a2f0a42a01d8bdc7d10103fccd7e3f7aedf0a9617c5f5f5103cd4388d25b1ccbc251e7c582602dd5907e02d8cdf6a84cccaa1fb036dc762ef4edcf0bc785e7d63d51c9d9d2e1aabaed2edd04687bbc4682ea1a04cce3a5adbad03b91cc32c36b82706c12acbe7ac5871eca86249287ee31c816498c1119586302fee015c7f153919054b72850bacb00be447381abdb3491f3ab9bbcf9e99fbce164617fa91e5e9c8257681e3a4667ede05982597c4f31ddc839320c75167fb96091d237b279b418a1293da8c991c161e18dbb72ed946b619240a85159ae532693af9b54e69e73e1821bd84fcabc18e45570691b32205b17
a94b80e2070505ab3991519d835ed9b6a1de21aa8a7d3e1911e2db89ca2acb462030a672ebfac17c24a375a67b978c61520ae21cd5c970c4dadf3108e1cf42a050064d3573b401115b170ecf2773cba3012c3362e08777c9183e024edeafb4adc84dd6d3ea5cab07a3a0abc995af74b77214acd6ee74da6fac5aa5de162a6a2f691d21a9903c74dae83f467c902a22d7b298dabde3ffaf4264b98d7f9b4e36346fba7c7d9bb95c9a44c8bde00cc16888d1f0d134059a00f95a428fe425edd509b52a49df3386ddcbc671df3983c5ead0b1c1725b2bb61e45729bf0fac4cffd35ab23b1200e710618ef2c3a2bf70ae54771091ac3466d3b8fb0278a3134a94b663c615f2662375f89c51894ab8ddab2ee720b8f7db7858917b4938f0e9cefb376b7e4c52ddf93a63396d7afdee365cca1627a8e2783a47dda8eb8996c582ac6eaf644ef3dcce4bdcceb74b271c7fa59ef211191d39e27125a17c3bab7d4d5d46b64fd803bb844ef2488a3b631734e3a7e809500d5d0808027d0bdf05d8a28d8f2cd0f70d57d05b2be596890953110f3aa11842965a83c55784fe7d5cd544022d2af8b6611374a681aece352fa3a2c138180535df085adc67cef0235e75b4194555b6a28aaaabc130ac01ac49ec805accd02a473abd793154eb3baec21129a41d6442ad9dc255224d0eb5d11264b9629e181c64d3b4cfd8749fd4d66540555efa93ed7b22418c5fad11b31dde111027b23b08af73642c1b98f934aab1359794486f301c825e16c3deb6ec30e1de02c28aee1b700b2381399f27848e02f3de5b39bf3018c9e03016d259b6feb31469f4d31e20723eed9896f62b891da3dcc8f2c65d3a206c5661e06efd17e8ab761145f7462574942e5deb54d877f3fe377d420477dc8c548e781dc2d054a649ef85e001a40f18d4f5f5cecca15bb97b67bf50f4a7fe5c53b258e3f985911ddb94480557fc1f4874db78dca28abfe52f82d71de2c41e90e0abb8806e780e81bdb3e2d57d001ac3b7056d77b97d65542392d943c17f22396bd7292626da14c810f94d452cf921e028e14a6a173ca7ab2a98baf659f760612da6ab1da68652b2c826b9fa871305eb7a4c11de573a95828a7e43a26983776884c93e40abfa2c788045cbdc8c1d1515aa55390cab09f6c1116733c62d0add3fd3731542b4bc416360eafe8996231a7961fb50130690b5ff5f5f476586ef20deaaa1df0516eb6a65900fa045c0412b20cae3f218452e1cac3f41917a4a0f8afa326d4420f734b92d62cbb158377c2f76840aa569a36fee849896c47a1d4861630ebf76654f8fa76fdc88e01d162c2754788db3da839f25a488faabcac6c7b705b9265ffc7db575286ff803b824660b0764a71f521c63d18f208310dce04e8751da023204355
5b830263e4e70b981069a96fbc5f73b0f267ecda55920300576e77822035e90034ede52951544982f136cc044e7fb4fbd574ae8ff2c505e95a44a0cc44a4b6544e9d7a2135b3937190e86d01d15b218a5a7cd9f2d2214da7d70b0a62460dd05470b990e296234e53f1d2795091ca7e3ac5b77c16d103f95bb77177602e242fccbd2d51082f408a93225fe12ffc3a94abe11473d5f878a4f4d8cd845b63d72ec99e663c5ad6ccaddc42e1e59e9fcfb5c271859d3a50f5c9d491457b8cce7adfa389950bfba20dee14210724737239d6598ecb84bdb792a4dc18e70215f1db87c8f199fca42f640c6ae1b6b23380948cc447db3fca53262c9f9c03685c183316b1c9da4d3461479cc96262100c14e7ade8ce2f5814218a655c976e71f60e1dba37cd2c9ab2b09bc6ce0181266959bbaf40c4c7a482f8024dd235c075519a8da95cca36534638df777b9139d765c2a1fbccb6db4a16872ecc3f5619fce054dc0ad7ae70355ac34f2a0091d29441014f9c153c6ba070e64d744f563bfffa2f2b8c01f90709301a63966cc1960a519054a9a3e2bf7db154bed3a45eb859100a51417899eb39a9d29fb2bcb1de89111c9d2fc6e048e74eb9148d3d15cf7398e513a70bb71b1a4e6a6203f53277b95f0499a8ebebcad73d62a26011632be35f9d2981f3cefb542c84f016f3e302797eb9808ae6d10d556dec796934d73602be8260ab33b7a13406f46c2e82b20707c58819abe86b0d7cf072149db857061a68e3e0fa1d1f774772dd7161f012754545ec83eeb6aaa26091bdb7b5251117dda5bebc33b37ae43d04f35b772cc2372836080e0aebce7282b251e4990c37cf379f69626be156f77bf8d9b95289b1790862db8c94e5d59cf5eafb2286d9b0395d52147197bb1469d6771a7dddc5d12ff0e964af447c156d08d0e088ab14ad1b1b4081318da97324cb0cc60daea55154066bc540a3d784d49ea894e7ee9708d0f626ea0d72df45604d2b697385b3d290bae9763dd3379505eda7e4464c8fd7045159159f6b2e5e9dcbd8e01504efb144df7b6fbb82a2ac91bbe33038a06547d12282cf505df64829c20fc6f089a830523fc4b35538c5d79daafecd2aeae7be7a71da5c84b85f90dc852ede24f63462440243105e24ca58822d6dbe468ff2b0c98e39122eb40e580cd9e40ced09497230f5072a5486dbe6836d5ca29b4180aa8998a18830f641cac1bb0bfee5028381440aba3ccde836e0c77d6742eb83de79744fe5d50426aeb0af09f9254d001c60ba5077d8cbcdcd97e99b8528def100c29187952941868ac264a20596ecfe4381cff81ee4fe617f16dad40b2a748c367458a69e95762ee73959d4df9585650b819a0a2ecdc16073506ab2c4a609b4211ad6ea36a3a04d057bf5032fbd918cc1
3135eed2186bf9bd676a6a2291bc42b56dfb6916b6ba22da386112a821b3ee9482c33b82cc49158ed4364ebbd6c490e577514f7b9baa771095578dbf38cac6c770765960d80290d4a2aa7404692519681b60fb975dae63d44f22612ef72d095b421837cd134b86003f60e8566140cafc880f761f0fc330f67cfb95de7f66c509c0ef0beea055eca5d019ec56026bd8799838dd51dd2420095a74db79efd4a75382264cb9b816c6227fc14d59e61ddfd1da7cb7d402bf636c4c8d08b382487fb442e14afe6701ad670076d9d15370daff144cc72e8a0285170722e97fa57773be723c76d5a8b46042695c2f6c760ad94047331f6e18a9b44bb7a6e100951094dc025cb20dfc89dbc5c24b7c6022f9c5750d63d729dfa0df194ce136ac4345c91901768279ef5e556e6125a2602b09a41f48c70831fb5392357557ef4f71d4438662ea0e8f22cf45eabb36b760d269e020150ebf7bdbfeb7e20a77f47ca6bc2447b0858b7b12fe162e6e022aa5b2c1bf275ed574160ef17c515680e44cc3fffdad0296c8955e4941e4417b7345dd6cf726bcc18f4ada38b324a6e1eca8c3d283ad22336deaa3a6ddfe1a465434f7888bd5beda52a6f9f1d8b0a2a60e049ed0cdaee0d124f4a9599c60524b42249b3677639044998933b11d9486adf45b9fc4b953321ddc753ff6176d00b60ee0be6e2a06ba3935121b2bed4a94d759951ef87771c06805c420d35f03296c5d0fc381467974c8639fac16ec513034f43931ed5dcfb212dd0d094375a5aac977e0dc1863d76a4785ecdef39970c55b38ff07ccb022516fbc12745cc096620f8d69e8f71e543dfd91dc9e24060c38775ad5eda1ebc7804b2bd2e841a32519a0cd610c7d15de84cfcf1f98753c0821373d4fa4cd3af2f01ca0355e9f681b7796220f785c592f932747dc1a9afb3f9b6509b496a3dabb41182f102317f4799248ce73f4aeb48813ba7b9a81b4a2da7600df1aad1d2b4730a94ba0b7175116caa3599c1fbc0ec7e1c19b7289d530ec492d2d0909766c71b106fe223d6cb04ce6ce70c61507cdc9d1b0dd792dfc583cfd278fab16b88417404b0db0259b1da4efd6336ce630731f7e00767bafe7c38863f41449def495aaa00f1538021d0c61a6cb6c70d4936ea548d105392e36a91d689c83e2b03d5b3150aff1d77c06bcc1c1cc1f6c25f2eb6b613b23ab1b46c1818ea06a1de1651853e26f74d1efa2483af069d9b1d6967ba7cc70e8f17e60b147c4a342e32d372389a2b20ad351e9b8ec45cd28a94aa8bf2e3e72dfe1a6a8ebe7d7db328c33b812eab2ea0dce2465a04e099b2d285529ccb05202de3c5f098021541772419df03307c0054698c6d484e6623a85f09e973c11fa8d986286bafebfa2054a8f043cbd1c40490ac9b4ebc226
6fe22aa38d81e9ac688907f86d7b741e035668062c32a6c5d2bbc91c13d184dad5ea7ca50c7b834eb69f5a124819132af80fe96ea0e4dc5d71b18aca6a1f759c8a7e491b28262fcc0cb83526e7a5cb71af032ecbd038a141a21347033a0186220678a5e8ab308bd1c44f7c05fca00cdbcb960a234e733d355201a63f89047bb72f2443d074139865b62f925a70dec5b3944ba23cdd3d1bc572c84dd38d9a9f76572c7d00809ec65ba6467573604419e43871a63c2c0c918e212700e2d42041852709bdeb98aef638d0898f93dc6433101bf88270ec5b37b1906ffc6d9b99e5495197249053d3448e198cdd8715ead9ea8f4c03db8adc039a72ff71a25058af55ded14b9b79471fb7f64e16d8cb72c4d60ccc9c5446e688dfb27afa786394970b02406914e9187ff7bd5fd744a146269da7f9614f35313b9622074d148bea3f2af08bf17d56bcd64650b172380141e4585692d9a1e9bd962c92624b0d79ac591bb70c9878910e894078a8dcac67d3fcfd2f38e1ac67c98114a2241c3eeb71439a7d7ecbfbddf56ad25c63b6e81561de30176b45ea89b1029ae2f044b66bd432bf2616d5edf51af683abd1ba2502d8d7d44d76362bcc71e971c21beccd1f68e3270ff051c4ea233f6dda578e64aa32207137a3f8d885cb867062644c6930160e58febafe3a352d4037bc99c91fb936feb8afb5380da1ec5efe41d3fe866918f5e10da89a1f227dad7e51dfe56b0714725b285c4b7951f3b68bd267cf86fd5bea3db48bf84f815abe12cb7c4885ece46873eb0aded4fb793e08e24370320f9d06354fef101b590c8bf14dee8994a4f819abdc861757047bae50f0b02c4964feb290566b60eee92d5545ead0d6880c0dc82fbc9fd9f7546f49cdd2b9a55867f5e650e94b631780c0512cfafc95c54c0496e4b97ed8a4b6e58320b2286fce786cb4b055d6480b4f99706ed205fd0d677d5debdf62fe5ef49dc9b0916a7d5a6fe7b5ce275162e1e86b410e2a02d8ebfdb4201bf2b16bce1b3a737012fb3bb272837304affa9e0455c47f5827f2cb637ddd4da9af3f385ab0a371ff30a44cc5bf6afdecc6a65956edb7be8d079d0b2146e3f2ea3b18d00ea57ca6d5e2d3b321af29ab87ab51105241d3bf49f949f40e2b2fd0ff3db8a6915a04952071c968dd30de55af68a0f99d92f8169eb2474da3df05826fabfec8c21401b67a92897f5c503f9ebb2d7d17e9f66e90dc57c8670f18de40ff41d0759c6b611ba032d18e60086033b455d8655690f8daba435c6ab5cd27a25ed8dcee8afbecb0dbf0f2c3da2d859e6252410528e76eb0c9e6a43ae48352b7deb32815cb353f50059294d583fc76933d7b26d121788417105d322002a8705fdeb1b8bc62f46e7a7300d9912266993ccf1dfd23237a0da53e
5576988d5df8c2742bb191c89ad69bf6607c45d9bc313ed94c6b06e3cf7421cc61269f94dac5cf953d505b62fb574787f2ffffb63871250cc50a9354e5187f0be27a2c8f157e70f6b58fca5708dbe7e0603984a94a63d84da6a9acd9982319d3cd1e5572d4a81f27313b1d75e8c192ef51fb25c9c7f7676121f2ee663b6a8764ec866accf9a7c7ef596f0d19fecc5cb1e0bfdd0a79b78d5d1d1944e36af06d7626ea898c035908e4e7d097e772953417a1c14c3675b22e995eadf9d8ede1031e1add0634790b778cb669976d2c277991c1f2f0bb102743afbdf6cd4d9eb36215135d1f9e55194c0d9cdf880b834fba7111f74a3610687cce1e2f478453c12971f8765cfdd94752f4899c7ec4dcb4b528515d45cfb7dca6a178757262a09086d115b4d9224d57e23d56d7f7eda0b0a2a261e9e9e1645a2ec5e6c01cf61795e163fad1640163eb58a9c0041975d750ce61317bd23f5fff585a38a3d8415bae632526332335df0fe74ce5b4c2010730b1bd200d33e1b6cfccee04da5b180f0e7348876a7feb5f723a95f39924eb17d423f0a2f4bd85cc2a1afce4f462014cedc171d12b4d716cd6480f204463bfd6ad321a6089d73781f81cbb72428dffd65bcca355eb0ab66992a446679915413cce10e99299227d3f2670b6045cada6a056ac7706add2c9ed9fc9ec93f98b2bfe67cea1c01104280d7d46994d525a985cceb7db47777632d4c4e7f88c9912150b0b6cb0d29785923a656b7de5b10b70f5967b4ffbe4d232d6b622b49c112fdc2f4cab83322d3d9895a66ab9cf5d2d0b035c005f4e7b15cbb54f3ec0e09d2e048ac1094a813188e5a8c0e5cc355adedb06db8137c0671f3559728fd85ee09849d4045191c1cef24ec33b83ea262ab76ed8130fab3367356fe20fdaa8e9dc6c8c8929462511f4df2e81e93b0ea2fa22a8580992ba01bf770dd8e146d56751649a38bc9f28319e73d083b4766fde9fd97573c6a0dfbef160de681bb602cc32ff0e38a31a9941260d0911d58ff48ad79994f7bf674b19757aebbf290256db3ad2bc93e5b319326cd0d4dd59fc922392b1431b68d275c3648cdac5dd85d2e68c8048254f0f28b2689e002316c9f40ba336d822e8d76f18782b71be332370e1bdaeed46a6de55609f9e0038d3cd4449c5538402b08d1f6b3a82278dde21808663998d6064672ff1ac564bb9ddf8481ba5903cf100aaabfe241ff1fc36f8ca1973c27aabbb04ee41405bc73660c5615a598a0fb80f128e4416ec7aaa8de443ba5c575c247e049c621bc888f3b7eece3457753d5b815d5c4e3ab6494820afb2f077f3b039fc2795e22ac27dfc440ca1f3262e830ad912819c3633e211c730ca08f19cd992ffe6ce409131215c68dd78c59590e02cdad4faa827d0eaf4f3bf58
4cced37e868a02d7126fc702a5994f73d503a22059cc3b78822bdc8f392c2ae96c716372dab14bbbd0eb49903ec1174932c5b06cb5cbf0ad0e4561b77573301aafe1cc9afd8dd277d22b7c49965b64abb7095baad41ac288c66f0966054cf7cc38caa6a4aaa19cb612977945a7a1fea5ddc77ad422ef6c7a16c35438b264d03832782ed96c050684a008bb7770bc665177204355993f92c1e0de46cb45375de83eed1fefe78896b1a0ba64cfc9cc692f20e3fbb797d316771f7b5fc333b3943856d7d338e611539430b31f306765d9a0893ed6c4cd15342d08aa13caa96ab98f96da57cb8edb01d5205e96e6f8658f767021db1bd298e70eec775af714f9e543be5fc7d8273ab070714e7ba9e58e1aae50dc68f464cfd85986d0795c3f32e10413c1839ecbb2af01a212d58284d936261baebd87367faa86ad3d350bb236aea5f79128d0724ce78a6086f38253a669d862db1851c6e7241e06d996bf0672e0f8bc49e1768d191a823090a5b58cf56e4b73f2bd0adee25c74a48682c9c5b7a1d7d6258f0900434b8890093ba367523c33de334392ff0754c29148f4fdfc22341cb21a413312fd68344191ebe2737fb70d71722aea8d074e35653d7257791c47359a410d667e84efd1b1beebf0a79a35ff0422e3f68e92a16e62bf80707d9e8cc502ee0291eaf367c61027c64316476d17c13a0763c82d28df21b8eec7a4513b6c4b11da8fdd70b55d62d36c39e245f7125da1b1148f6ac48cd14ccf8d95d27d61b2766d533d4ac2d9d2e62d4a3915ee7dd701fd28801a50b6b30f26b9a5dbe414f65164311ff51edb905eb782f6c7953bce207cb1c53ffafa1ca63bbf3c8070e99084c9d9edbd50d7b036179f1ca0e2ba21855083ed80f1af6e6c4780e162de236d9bd639701c94e27038d26bb4fc5279026c0e285340d697b024d7371c3450dd4f166a9214839b2a50fc290c78327659c2bbfac6d19ab6198c287f3a0995b557ad06ebc0d6f6bfa80169d2ccaa47de92f3be6b45f4acc35be27f9355baecb0be7a24045be736ae4982f4f25e6a93903032ac381e7907ea94632c8a250222caa9a2e4716b02d0ff38c2808b93eeb8b1e53bb39250c45e43dc6f8242359bb5370ba7f90a97dc966bbaf0aa6b9cd3eb3476fa91898a553b45f7166fcfef9794b04ea4c6f7f251d20a9bf1e65facc5c3a502b232f725dbe59f508a79a130b5716efa7582a6040bf386cdc1e31a1a774acd353f4588e5b723708c0e85e2dabfb1c54d2b0190936e159f39926f14417b20c7ca4cd460b55031df62011d75e41941bc609bfac7e61d7ecd238145acfcaa4fac8f8c2c9e5fb283915b7653cb30bf89fff4d4aa86926dc48bbff1aa0510e85cac71c45cf0c8968f64c68a913ab6d7187f9dae93f371d542bccb
123a28723a79dc32384c0b888f6c18a73fbd049c3975203d2d005188dba5c95eb23a28723a79dc32384c0b888f6c18a73fbd049c3975203d2d005188dba5c95eb159f6da4b691bfc23785884663d542bc51dc650d158629fbfcec7d4000aa98cf053721c744a39b6c8e276f1aaf613b40a8158b9d304fc6df191364dde4bd3b6c040b5b2919f54fb2e3f3a93c4d08eef2ddf1220b88458e13f99337ee26c694e627d52c9142662b48f7f1548fa998b65fcc0d5234f4774b9f72f30d63004216d72b6f6ed8bb5159a0cc5c75c29aec802b88383edfb61f7d0a24048b0cffb5e0e918bb27e62ac84b0c300f0e34656e03ebcda8b2612c6c8680812e879a9fafa7272277bf9c68b3a97be3ecdbf465b8afcdb4ba977545bf03a7d7fae6f66cc42af90a490df5085d76a8a1d751caa5e3c2dda6ba0012fbf7d69b856d114458543fa72e3b5dc2130e06b38602561f1e6d1488c8562725efa10a0806a21d7e57555e121593c6a94db7721651232c61221bf8821bab5ff4bef7ffe2d48a35e3a79137051e13e3db2c53b90bb71fb0682f18e93f5a90402d20d490771a68b752cdd15ce707eb9091026a78c6d123aef2794d73c0917ec724d28a9b347f5452101b2094bc1c2fe289a2bcb20a9a2eb5f11c21b21b274d05f8416122c2262d6a3a6b6fa49a24425fa099f343f7203f0f44e60ea400c78abe83f48efe597d363de0f627574a0ae9155b41930673600ac427487307b88fa3825afc353208627e5dc5a3ccb0cf14d0428982b7e667a707d12802b52fd964a7c9cafcd8d74d05cb7773fc2788182a753381959ab82728a294beefaf9099b84ab03bb90148bd85486491bc3507f51cd3f0c413db5cba2d12f1a576a862edfc6e2252479d8978344d7e61a36c65341732a1e1d9ab0c3b8e5ab0dd9e1b2e3f791dbadf37ef95fb046605cf6e4d7588187f3a90890f0cf3e4b4a5afbe751ac29b140c679ab6400adf8a66a30e6160a22d4e1c364a1a3998ad482fed316a1b956e4cc9073c1d00b0e49b136f9816b91121886edc6748d1c4a3d1217aedc35e02adbe2da4ca4fe4fae869b989df33ccdb1a229de00e78f53699fcf0500a74bb3b8556dc67d39857f27579123989983d261a77c9573e75b398a0961f829b06d90e1fb6de12a66a4fbd643ca21fb785ca8e25346217b6123352ebd1ab05bb6530963a29832c4e397661a80a03e9def2948b06ce66c0d6a4709536e21e0b835ba3e37192f536763356222031ddb8748bd1132e91037ab87e4f931440e7f73dbf2b88f786393df80d99c68891489935c5f7be222a8fe38cc953a10abf8ca14ee28b0ff59f3dbaabd89b1b336c7d869bb948092a17302306619c0dbabdba6462d68554924bb049e6d5751598579c922a9cef13284163c3a2c04d4
f155c204127e85c96c40ff90af4673d7eb82b0223727f58c4103b910c831e82257e4940d70d1cc115b9f9a36c724c47b57eceef68f4b5398028cf6b4e5740a0baa3fd5a82c4dfeac5195f4783ee2eda2766e866047031588e2ea32e651461b6afd353d3d60d2f39c976a6b557ea32e84cebe90492893b932c267647941eff869fc2c2205cc18764c1c4e9e10ceb612c3dcac64e284707e7aa29f705f908cfea3fc0e8aa1ca02a8eda62f376f4b044adbd925cfcddfcb1ff42269559fea026a6b88f54bc9f6f2aefa268203fd50bca718cde01f0851c143b7d212090bb321021e6b058ea2f2b8a0448b2e46d61cfd9a7b48e00dafb6bc637300c4ea1f105920b327f8513ee2c0e25b4adf9603a5f7336620a02d2a4f9960706286b17f1b5d4ea0af98b5513b2df04d739b1b2d2828c46ca30173c50da2e76bc2370f58f976bb02d9ff99a7f6922a64be5c0644fdea5d57764a657ae59db85db28102b9864182c06b7a18aca782ed3feb4066b763fbe8d1aabe3fc77cc7303c22faf5b1e20747ee99b5aead084cf010de9bb6a6f8444666ff85bf2865d85854b21b90b4f72f2b401b6dd9d1e0b4b9882c819ccf529feb403f58d64fd8686b837063b12b96cb5db0369dcb7890db7a0ce5fc9b72788e1135297dbd972296e36fc1a6f8fe46eb59cf9860384ec6b6ab6c39ddf8de7507f89c9d603a45c9cf4320321932dca1ab1453733f5489fc6ac4f4e72b72acc5f23a610e4d06c909c3d835f089f6e96b9dfd21766a62f882565d7ccae7a7782c0904c9c21a3eb344a24323a061a477f344ac6f5988c68547d2db3a8ba40289bd4741b65b8df7d14da435e9d2aea93b6d72d6b8d926607378c2e8f8e9564ddaa97af9504359f4c805854615608ccb919395924e41b2f4b41faa757c032648c36faf92879d943131b7026165f0fbccad1f1d6ae4c5a541516e4850a6344c60e437ca098a5688d9e74c1f063771ef030b5de2a47690ec9873c9d609666b07841a9d402aad0370285620bc7bf152030228d6735dc72cd10695ef45cf0b7b61eaaeae1b509f57d9bcc00f00b583001d8279682cd0e3a4c907161d3b612d581834bbb044f07d4bab00b557c41be272ddb897f3b2b743a26294ccac3e841b3869c6ab516ea113c8ba90d0ef873945f1247e7ac6a438d4d01c93e9712662308fb8884df08168f22e306e5e6ca8e2a2b19d7650d5ae1ea34c5f73d9b77894ee5f23627817a287439f469170c1dac54500825c507574047960ef94fc9e42aba17633f57565dc71529ff9918b066cae84d0f913460f978856b4c384feb0c54f9c44e29b5c44e32a78f0b19e9657e5e3d0823e77d18c48c72502a24323decf5818983e4e1984567da221aef8b1509e9352428acded52e12ef3c69d6d788a2afea6
f289f72cfc5bcfc1066257e3d6fdaffa60a3124c0bf2854fc0a4368f852d424d5b1470195f2a3787c68a5366214aad99529abd229629cccde06d49a040728473e0f26092a84f7e8664ba104d84f24288a13588554273dd25d53cdbdf79742ea3cd2f0a95cf869d485b90fd81acb6539670e9affbc65f725ae3eee8d38e10b4f55ad2b13340b0e7959fc6c02e0c33713382912941089ae2e1d9e910e94cf2236d3b54533571660ebab4b7820b5b9c96a812b4328b31e545d5fa6e8a9b3702c9aaddab020f7108b8a1e6ca788293ac015e00b55d600c014d0aaaa874e2debb007b7d1aae0f20963b77fe3bbd3e67cf2ac6c007f52f3b7ba79aa39229923e9ec44d0ef1ff91a42064c6d3b6047e54b31bf1e04c91048f66d6b4f1f0bc44234f7c0dadfbf0dc0f159906fa268dc0637247f2b25cc7e2959c39b9ae9faa79850ae58c6feff4058dadc9a4e46cfabe98ee8c5cb2c457aca466ff0290f935a06c91a76c7ff04e36047904fac9ec914f689157e57048c66f1faa38fece4a635ad34fe0e2e78476b901c22ce2b2b93d066f3c24f601695f4fac98b5115b40d70b9bca085611a5360b39a1ff893431ad90fad76aa0b02072264586484d0e2b1261d600274bd71ef308d81952235e584b4e4fd7e82f606737538127cb9ef9d07344d6e7813d7bfd2bf56d3a8661f37b0a614205d7ee4 \ No newline at end of file diff --git a/l1-contracts/test/staking_asset_handler/zkpassport/fixtures/valid_public_inputs.json b/l1-contracts/test/staking_asset_handler/zkpassport/fixtures/valid_public_inputs.json index d4ec76663d41..f07ca48e9111 100644 --- a/l1-contracts/test/staking_asset_handler/zkpassport/fixtures/valid_public_inputs.json +++ b/l1-contracts/test/staking_asset_handler/zkpassport/fixtures/valid_public_inputs.json @@ -1,14 +1,15 @@ { "inputs": [ - "0x27eb6ee351be62a96a3209749074c8641d2b15845a37d3f312ad805b0f4ec4d0", - "0x1b84a41b213ad424e50a306f4ea6c961a66caac76915241df4dc93b6757dd9cd", - "0x0000000000000000000000000000000000000000000000000000000068d6b132", + "0x166a7b95f72ab538ed181b0888ccee18204f98dca373187c42503aacc290a72c", + "0x2d88250ced89385b0135ba36f190f858fd329b2694f479c5a605884c7f194384", + "0x0000000000000000000000000000000000000000000000000000000068f54abb", "0x008d535e2a7f4ee38a4d12aa88bcf21d2c2f6fa051d12eafba6655bf37e8c11c", 
"0x00f54fbb0f658e7013ec2114ef095a29bb3e2f95b96dbd93e46f12f67863111a", "0x00ed45ee6335b3285f58a3c472e398fc6754d1eaa9d7043a60b2daa0a67332e7", "0x000105354ff92c66a5d42a8d34033ace18d259e4dc04b35a584dc370c00586ba", "0x0037e4a54fb3f1500058797f475d37f2f8cd9b87f20762c21497a2ab2f655f64", - "0x00123482330ec434a086016334db04fdd2ac4764617511c8904dcf2a0343327b", + "0x0031f315a5531bb674ebe625f9ff8298ed0c0a07a173287b47e57691138fb09e", + "0x00dcec2b86a5267e6e92db9cc0017a2e89d62df782c4254d856bff5eaef8c391", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x171de101deed3f056917faecfe6cc04db2ef02689a8a483962a688948ce44461" ] diff --git a/spartan/terraform/gke-cluster/auto-updater-config.tf b/spartan/terraform/gke-cluster/auto-updater-config.tf index 07f23a9e2054..47470c739e0e 100644 --- a/spartan/terraform/gke-cluster/auto-updater-config.tf +++ b/spartan/terraform/gke-cluster/auto-updater-config.tf @@ -75,9 +75,9 @@ resource "google_storage_bucket_object" "testnet" { }) } -resource "google_storage_bucket_object" "ignition-mainnet" { +resource "google_storage_bucket_object" "mainnet" { bucket = google_storage_managed_folder.aztec_mainnet_auto_update_folder.bucket - name = "${google_storage_managed_folder.aztec_mainnet_auto_update_folder.name}ignition-mainnet.json" + name = "${google_storage_managed_folder.aztec_mainnet_auto_update_folder.name}mainnet.json" content_type = "application/json" cache_control = "no-store" content = jsonencode({ diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index a33bfa8ca7b9..5a01f5290f9b 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -107,7 +107,7 @@ describe('Archiver', () => { let blobSinkClient: MockProxy; let epochCache: MockProxy; let archiverStore: ArchiverDataStore; - let l1Constants: L1RollupConstants & { l1StartBlockHash: Buffer32 }; + let l1Constants: L1RollupConstants & { 
l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr }; let now: number; let mockRollupRead: MockProxy; @@ -168,6 +168,7 @@ describe('Archiver', () => { slotDuration: 24, ethereumSlotDuration: 12, proofSubmissionEpochs: 1, + genesisArchiveRoot: new Fr(GENESIS_ARCHIVE_ROOT), }; archiver = new Archiver( diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 3e293ef93deb..75bf6ce9f2ff 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -150,7 +150,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem private readonly blobSinkClient: BlobSinkClientInterface, private readonly epochCache: EpochCache, private readonly instrumentation: ArchiverInstrumentation, - private readonly l1constants: L1RollupConstants & { l1StartBlockHash: Buffer32 }, + private readonly l1constants: L1RollupConstants & { l1StartBlockHash: Buffer32; genesisArchiveRoot: Fr }, private readonly log: Logger = createLogger('archiver'), ) { super(); @@ -184,10 +184,11 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem const rollup = new RollupContract(publicClient, config.l1Contracts.rollupAddress); - const [l1StartBlock, l1GenesisTime, proofSubmissionEpochs] = await Promise.all([ + const [l1StartBlock, l1GenesisTime, proofSubmissionEpochs, genesisArchiveRoot] = await Promise.all([ rollup.getL1StartBlock(), rollup.getL1GenesisTime(), rollup.getProofSubmissionEpochs(), + rollup.getGenesisArchiveTreeRoot(), ] as const); const l1StartBlockHash = await publicClient @@ -204,6 +205,7 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem slotDuration, ethereumSlotDuration, proofSubmissionEpochs: Number(proofSubmissionEpochs), + genesisArchiveRoot: Fr.fromHexString(genesisArchiveRoot), }; const opts = merge({ pollingIntervalMs: 10_000, batchSize: 100 }, mapArchiverConfig(config)); @@ -977,6 +979,10 
@@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem return Promise.resolve(this.l1constants); } + public getGenesisValues(): Promise<{ genesisArchiveRoot: Fr }> { + return Promise.resolve({ genesisArchiveRoot: this.l1constants.genesisArchiveRoot }); + } + public getRollupAddress(): Promise { return Promise.resolve(this.l1Addresses.rollupAddress); } @@ -1097,6 +1103,22 @@ export class Archiver extends (EventEmitter as new () => ArchiverEmitter) implem return limitWithProven === 0 ? [] : await this.store.getPublishedBlocks(from, limitWithProven); } + public getPublishedBlockByHash(blockHash: Fr): Promise { + return this.store.getPublishedBlockByHash(blockHash); + } + + public getPublishedBlockByArchive(archive: Fr): Promise { + return this.store.getPublishedBlockByArchive(archive); + } + + public getBlockHeaderByHash(blockHash: Fr): Promise { + return this.store.getBlockHeaderByHash(blockHash); + } + + public getBlockHeaderByArchive(archive: Fr): Promise { + return this.store.getBlockHeaderByArchive(archive); + } + /** * Gets an l2 block. * @param number - The block number to return. 
@@ -1592,9 +1614,21 @@ export class ArchiverStoreHelper getPublishedBlock(number: number): Promise { return this.store.getPublishedBlock(number); } + getPublishedBlockByHash(blockHash: Fr): Promise { + return this.store.getPublishedBlockByHash(blockHash); + } + getPublishedBlockByArchive(archive: Fr): Promise { + return this.store.getPublishedBlockByArchive(archive); + } getBlockHeaders(from: number, limit: number): Promise { return this.store.getBlockHeaders(from, limit); } + getBlockHeaderByHash(blockHash: Fr): Promise { + return this.store.getBlockHeaderByHash(blockHash); + } + getBlockHeaderByArchive(archive: Fr): Promise { + return this.store.getBlockHeaderByArchive(archive); + } getTxEffect(txHash: TxHash): Promise { return this.store.getTxEffect(txHash); } diff --git a/yarn-project/archiver/src/archiver/archiver_store.ts b/yarn-project/archiver/src/archiver/archiver_store.ts index 5bedb316eabb..9a0c058be995 100644 --- a/yarn-project/archiver/src/archiver/archiver_store.ts +++ b/yarn-project/archiver/src/archiver/archiver_store.ts @@ -61,6 +61,18 @@ export interface ArchiverDataStore { */ getPublishedBlock(number: number): Promise; + /** + * Returns the block for the given hash, or undefined if not exists. + * @param blockHash - The block hash to return. + */ + getPublishedBlockByHash(blockHash: Fr): Promise; + + /** + * Returns the block for the given archive root, or undefined if not exists. + * @param archive - The archive root to return. + */ + getPublishedBlockByArchive(archive: Fr): Promise; + /** * Gets up to `limit` amount of published L2 blocks starting from `from`. * @param from - Number of the first block to return (inclusive). @@ -77,6 +89,18 @@ export interface ArchiverDataStore { */ getBlockHeaders(from: number, limit: number): Promise; + /** + * Returns the block header for the given hash, or undefined if not exists. + * @param blockHash - The block hash to return. 
+ */ + getBlockHeaderByHash(blockHash: Fr): Promise; + + /** + * Returns the block header for the given archive root, or undefined if not exists. + * @param archive - The archive root to return. + */ + getBlockHeaderByArchive(archive: Fr): Promise; + /** * Gets a tx effect. * @param txHash - The hash of the tx corresponding to the tx effect. diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts index 29369d6a1bce..a542949ad2bb 100644 --- a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts +++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts @@ -143,6 +143,28 @@ export function describeArchiverDataStore( await store.addBlocks(blocks); await expect(store.unwindBlocks(5, 1)).rejects.toThrow(/can only unwind blocks from the tip/i); }); + + it('unwound blocks and headers cannot be retrieved by hash or archive', async () => { + await store.addBlocks(blocks); + const lastBlock = blocks[blocks.length - 1]; + const blockHash = await lastBlock.block.hash(); + const archive = lastBlock.block.archive.root; + + // Verify block and header exist before unwinding + expect(await store.getPublishedBlockByHash(blockHash)).toBeDefined(); + expect(await store.getPublishedBlockByArchive(archive)).toBeDefined(); + expect(await store.getBlockHeaderByHash(blockHash)).toBeDefined(); + expect(await store.getBlockHeaderByArchive(archive)).toBeDefined(); + + // Unwind the block + await store.unwindBlocks(lastBlock.block.number, 1); + + // Verify neither block nor header can be retrieved after unwinding + expect(await store.getPublishedBlockByHash(blockHash)).toBeUndefined(); + expect(await store.getPublishedBlockByArchive(archive)).toBeUndefined(); + expect(await store.getBlockHeaderByHash(blockHash)).toBeUndefined(); + expect(await store.getBlockHeaderByArchive(archive)).toBeUndefined(); + }); }); describe('getBlocks', () => { @@ -180,6 +202,86 @@ export function 
describeArchiverDataStore( }); }); + describe('getPublishedBlockByHash', () => { + beforeEach(async () => { + await store.addBlocks(blocks); + }); + + it('retrieves a block by its hash', async () => { + const expectedBlock = blocks[5]; + const blockHash = await expectedBlock.block.hash(); + const retrievedBlock = await store.getPublishedBlockByHash(blockHash); + + expect(retrievedBlock).toBeDefined(); + expectBlocksEqual([retrievedBlock!], [expectedBlock]); + }); + + it('returns undefined for non-existent block hash', async () => { + const nonExistentHash = Fr.random(); + await expect(store.getPublishedBlockByHash(nonExistentHash)).resolves.toBeUndefined(); + }); + }); + + describe('getPublishedBlockByArchive', () => { + beforeEach(async () => { + await store.addBlocks(blocks); + }); + + it('retrieves a block by its archive root', async () => { + const expectedBlock = blocks[3]; + const archive = expectedBlock.block.archive.root; + const retrievedBlock = await store.getPublishedBlockByArchive(archive); + + expect(retrievedBlock).toBeDefined(); + expectBlocksEqual([retrievedBlock!], [expectedBlock]); + }); + + it('returns undefined for non-existent archive root', async () => { + const nonExistentArchive = Fr.random(); + await expect(store.getPublishedBlockByArchive(nonExistentArchive)).resolves.toBeUndefined(); + }); + }); + + describe('getBlockHeaderByHash', () => { + beforeEach(async () => { + await store.addBlocks(blocks); + }); + + it('retrieves a block header by its hash', async () => { + const expectedBlock = blocks[7]; + const blockHash = await expectedBlock.block.hash(); + const retrievedHeader = await store.getBlockHeaderByHash(blockHash); + + expect(retrievedHeader).toBeDefined(); + expect(retrievedHeader!.equals(expectedBlock.block.header)).toBe(true); + }); + + it('returns undefined for non-existent block hash', async () => { + const nonExistentHash = Fr.random(); + await expect(store.getBlockHeaderByHash(nonExistentHash)).resolves.toBeUndefined(); + }); 
+ }); + + describe('getBlockHeaderByArchive', () => { + beforeEach(async () => { + await store.addBlocks(blocks); + }); + + it('retrieves a block header by its archive root', async () => { + const expectedBlock = blocks[2]; + const archive = expectedBlock.block.archive.root; + const retrievedHeader = await store.getBlockHeaderByArchive(archive); + + expect(retrievedHeader).toBeDefined(); + expect(retrievedHeader!.equals(expectedBlock.block.header)).toBe(true); + }); + + it('returns undefined for non-existent archive root', async () => { + const nonExistentArchive = Fr.random(); + await expect(store.getBlockHeaderByArchive(nonExistentArchive)).resolves.toBeUndefined(); + }); + }); + describe('getSyncedL2BlockNumber', () => { it('returns the block number before INITIAL_L2_BLOCK_NUM if no blocks have been added', async () => { await expect(store.getSynchedL2BlockNumber()).resolves.toEqual(INITIAL_L2_BLOCK_NUM - 1); diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts index 0cf29fd02e07..b5b7d270685d 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts @@ -66,6 +66,12 @@ export class BlockStore { /** Index mapping a contract's address (as a string) to its location in a block */ #contractIndex: AztecAsyncMap; + /** Index mapping block hash to block number */ + #blockHashIndex: AztecAsyncMap; + + /** Index mapping block archive to block number */ + #blockArchiveIndex: AztecAsyncMap; + #log = createLogger('archiver:block_store'); constructor(private db: AztecAsyncKVStore) { @@ -73,6 +79,8 @@ export class BlockStore { this.#blockTxs = db.openMap('archiver_block_txs'); this.#txEffects = db.openMap('archiver_tx_effects'); this.#contractIndex = db.openMap('archiver_contract_index'); + this.#blockHashIndex = db.openMap('archiver_block_hash_index'); + this.#blockArchiveIndex = 
db.openMap('archiver_block_archive_index'); this.#lastSynchedL1Block = db.openSingleton('archiver_last_synched_l1_block'); this.#lastProvenL2Block = db.openSingleton('archiver_last_proven_l2_block'); this.#pendingChainValidationStatus = db.openSingleton('archiver_pending_chain_validation_status'); @@ -132,6 +140,10 @@ export class BlockStore { blockHash.toString(), Buffer.concat(block.block.body.txEffects.map(tx => tx.txHash.toBuffer())), ); + + // Update indices for block hash and archive + await this.#blockHashIndex.set(blockHash.toString(), block.block.number); + await this.#blockArchiveIndex.set(block.block.archive.root.toString(), block.block.number); } await this.#lastSynchedL1Block.set(blocks[blocks.length - 1].l1.blockNumber); @@ -170,6 +182,11 @@ export class BlockStore { await Promise.all(block.block.body.txEffects.map(tx => this.#txEffects.delete(tx.txHash.toString()))); const blockHash = (await block.block.hash()).toString(); await this.#blockTxs.delete(blockHash); + + // Clean up indices + await this.#blockHashIndex.delete(blockHash); + await this.#blockArchiveIndex.delete(block.block.archive.root.toString()); + this.#log.debug(`Unwound block ${blockNumber} ${blockHash}`); } @@ -205,6 +222,66 @@ export class BlockStore { return this.getBlockFromBlockStorage(blockNumber, blockStorage); } + /** + * Gets an L2 block by its hash. + * @param blockHash - The hash of the block to return. + * @returns The requested L2 block. + */ + async getBlockByHash(blockHash: L2BlockHash): Promise { + const blockNumber = await this.#blockHashIndex.getAsync(blockHash.toString()); + if (blockNumber === undefined) { + return undefined; + } + return this.getBlock(blockNumber); + } + + /** + * Gets an L2 block by its archive root. + * @param archive - The archive root of the block to return. + * @returns The requested L2 block. 
+ */ + async getBlockByArchive(archive: Fr): Promise { + const blockNumber = await this.#blockArchiveIndex.getAsync(archive.toString()); + if (blockNumber === undefined) { + return undefined; + } + return this.getBlock(blockNumber); + } + + /** + * Gets a block header by its hash. + * @param blockHash - The hash of the block to return. + * @returns The requested block header. + */ + async getBlockHeaderByHash(blockHash: L2BlockHash): Promise { + const blockNumber = await this.#blockHashIndex.getAsync(blockHash.toString()); + if (blockNumber === undefined) { + return undefined; + } + const blockStorage = await this.#blocks.getAsync(blockNumber); + if (!blockStorage || !blockStorage.header) { + return undefined; + } + return BlockHeader.fromBuffer(blockStorage.header); + } + + /** + * Gets a block header by its archive root. + * @param archive - The archive root of the block to return. + * @returns The requested block header. + */ + async getBlockHeaderByArchive(archive: Fr): Promise { + const blockNumber = await this.#blockArchiveIndex.getAsync(archive.toString()); + if (blockNumber === undefined) { + return undefined; + } + const blockStorage = await this.#blocks.getAsync(blockNumber); + if (!blockStorage || !blockStorage.header) { + return undefined; + } + return BlockHeader.fromBuffer(blockStorage.header); + } + /** * Gets the headers for a sequence of L2 blocks. * @param start - Number of the first block to return (inclusive). 
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts index c1e01a28976d..39200bb0cce9 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts @@ -5,7 +5,7 @@ import { createLogger } from '@aztec/foundation/log'; import type { AztecAsyncKVStore, CustomRange, StoreSize } from '@aztec/kv-store'; import { FunctionSelector } from '@aztec/stdlib/abi'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; -import type { L2Block, ValidateBlockResult } from '@aztec/stdlib/block'; +import { type L2Block, L2BlockHash, type ValidateBlockResult } from '@aztec/stdlib/block'; import type { ContractClassPublic, ContractDataSource, @@ -204,6 +204,14 @@ export class KVArchiverDataStore implements ArchiverDataStore, ContractDataSourc return this.#blockStore.getBlock(number); } + getPublishedBlockByHash(blockHash: Fr): Promise { + return this.#blockStore.getBlockByHash(L2BlockHash.fromField(blockHash)); + } + + getPublishedBlockByArchive(archive: Fr): Promise { + return this.#blockStore.getBlockByArchive(archive); + } + /** * Gets up to `limit` amount of L2 blocks starting from `from`. * @@ -226,6 +234,14 @@ export class KVArchiverDataStore implements ArchiverDataStore, ContractDataSourc return toArray(this.#blockStore.getBlockHeaders(start, limit)); } + getBlockHeaderByHash(blockHash: Fr): Promise { + return this.#blockStore.getBlockHeaderByHash(L2BlockHash.fromField(blockHash)); + } + + getBlockHeaderByArchive(archive: Fr): Promise { + return this.#blockStore.getBlockHeaderByArchive(archive); + } + /** * Gets a tx effect. * @param txHash - The hash of the tx corresponding to the tx effect. 
diff --git a/yarn-project/archiver/src/test/mock_l2_block_source.ts b/yarn-project/archiver/src/test/mock_l2_block_source.ts index 14b089ff5f07..8ce9776754c3 100644 --- a/yarn-project/archiver/src/test/mock_l2_block_source.ts +++ b/yarn-project/archiver/src/test/mock_l2_block_source.ts @@ -1,7 +1,8 @@ +import { GENESIS_ARCHIVE_ROOT } from '@aztec/constants'; import { DefaultL1ContractsConfig } from '@aztec/ethereum'; import { Buffer32 } from '@aztec/foundation/buffer'; import { EthAddress } from '@aztec/foundation/eth-address'; -import type { Fr } from '@aztec/foundation/fields'; +import { Fr } from '@aztec/foundation/fields'; import { createLogger } from '@aztec/foundation/log'; import type { FunctionSelector } from '@aztec/stdlib/abi'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; @@ -126,6 +127,57 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { ); } + public async getPublishedBlockByHash(blockHash: Fr): Promise { + for (const block of this.l2Blocks) { + const hash = await block.hash(); + if (hash.equals(blockHash)) { + return PublishedL2Block.fromFields({ + block, + l1: { + blockNumber: BigInt(block.number), + blockHash: Buffer32.random().toString(), + timestamp: BigInt(block.number), + }, + attestations: [], + }); + } + } + return undefined; + } + + public getPublishedBlockByArchive(archive: Fr): Promise { + const block = this.l2Blocks.find(b => b.archive.root.equals(archive)); + if (!block) { + return Promise.resolve(undefined); + } + return Promise.resolve( + PublishedL2Block.fromFields({ + block, + l1: { + blockNumber: BigInt(block.number), + blockHash: Buffer32.random().toString(), + timestamp: BigInt(block.number), + }, + attestations: [], + }), + ); + } + + public async getBlockHeaderByHash(blockHash: Fr): Promise { + for (const block of this.l2Blocks) { + const hash = await block.hash(); + if (hash.equals(blockHash)) { + return block.header; + } + } + return undefined; + } + + public 
getBlockHeaderByArchive(archive: Fr): Promise { + const block = this.l2Blocks.find(b => b.archive.root.equals(archive)); + return Promise.resolve(block?.header); + } + getBlockHeader(number: number | 'latest'): Promise { return Promise.resolve(this.l2Blocks.at(typeof number === 'number' ? number - 1 : -1)?.header); } @@ -231,6 +283,10 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { return Promise.resolve(EmptyL1RollupConstants); } + getGenesisValues(): Promise<{ genesisArchiveRoot: Fr }> { + return Promise.resolve({ genesisArchiveRoot: new Fr(GENESIS_ARCHIVE_ROOT) }); + } + getL1Timestamp(): Promise { throw new Error('Method not implemented.'); } diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index d5f1d848c4c4..808e31d4758d 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -548,6 +548,26 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { return await this.blockSource.getBlock(blockNumber); } + /** + * Get a block specified by its hash. + * @param blockHash - The block hash being requested. + * @returns The requested block. + */ + public async getBlockByHash(blockHash: Fr): Promise { + const publishedBlock = await this.blockSource.getPublishedBlockByHash(blockHash); + return publishedBlock?.block; + } + + /** + * Get a block specified by its archive root. + * @param archive - The archive root being requested. + * @returns The requested block. + */ + public async getBlockByArchive(archive: Fr): Promise { + const publishedBlock = await this.blockSource.getPublishedBlockByArchive(archive); + return publishedBlock?.block; + } + /** * Method to request blocks. Will attempt to return all requested blocks but will return only those available. * @param from - The start of the range of blocks to return. 
@@ -1056,6 +1076,24 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, Traceable { : this.blockSource.getBlockHeader(blockNumber); } + /** + * Get a block header specified by its hash. + * @param blockHash - The block hash being requested. + * @returns The requested block header. + */ + public async getBlockHeaderByHash(blockHash: Fr): Promise { + return await this.blockSource.getBlockHeaderByHash(blockHash); + } + + /** + * Get a block header specified by its archive root. + * @param archive - The archive root being requested. + * @returns The requested block header. + */ + public async getBlockHeaderByArchive(archive: Fr): Promise { + return await this.blockSource.getBlockHeaderByArchive(archive); + } + /** * Simulates the public part of a transaction with the current state. * @param tx - The transaction to simulate. diff --git a/yarn-project/aztec-node/src/sentinel/sentinel.ts b/yarn-project/aztec-node/src/sentinel/sentinel.ts index dc3c43b86639..b46cd8cd47e9 100644 --- a/yarn-project/aztec-node/src/sentinel/sentinel.ts +++ b/yarn-project/aztec-node/src/sentinel/sentinel.ts @@ -321,8 +321,10 @@ export class Sentinel extends (EventEmitter as new () => WatcherEmitter) impleme // (contains the ones synced from mined blocks, which we may have missed from p2p). const block = this.slotNumberToBlock.get(slot); const p2pAttested = await this.p2p.getAttestationsForSlot(slot, block?.archive); + // Filter out attestations with invalid signatures + const p2pAttestors = p2pAttested.map(a => a.getSender()).filter((s): s is EthAddress => s !== undefined); const attestors = new Set( - [...p2pAttested.map(a => a.getSender().toString()), ...(block?.attestors.map(a => a.toString()) ?? [])].filter( + [...p2pAttestors.map(a => a.toString()), ...(block?.attestors.map(a => a.toString()) ?? 
[])].filter( addr => proposer.toString() !== addr, // Exclude the proposer from the attestors ), ); diff --git a/yarn-project/cli/src/config/chain_l2_config.ts b/yarn-project/cli/src/config/chain_l2_config.ts index 094dc39b99fa..3cdd1a1824fc 100644 --- a/yarn-project/cli/src/config/chain_l2_config.ts +++ b/yarn-project/cli/src/config/chain_l2_config.ts @@ -285,7 +285,7 @@ export const testnetL2ChainConfig: L2ChainConfig = { ...DefaultNetworkDBMapSizeConfig, }; -export const ignitionL2ChainConfig: L2ChainConfig = { +export const mainnetL2ChainConfig: L2ChainConfig = { l1ChainId: 1, testAccounts: false, sponsoredFPC: false, @@ -295,9 +295,9 @@ export const ignitionL2ChainConfig: L2ChainConfig = { seqMinTxsPerBlock: 0, seqMaxTxsPerBlock: 0, realProofs: true, - snapshotsUrls: [`${SNAPSHOTS_URL}/ignition/`], + snapshotsUrls: [`${SNAPSHOTS_URL}/mainnet/`], autoUpdate: 'notify', - autoUpdateUrl: 'https://storage.googleapis.com/aztec-testnet/auto-update/ignition.json', + autoUpdateUrl: 'https://storage.googleapis.com/aztec-mainnet/auto-update/mainnet.json', maxTxPoolSize: 100_000_000, // 100MB publicIncludeMetrics, publicMetricsCollectorUrl: 'https://telemetry.alpha-testnet.aztec-labs.com/v1/metrics', @@ -375,8 +375,8 @@ export function getL2ChainConfig(networkName: NetworkNames): L2ChainConfig | und config = { ...testnetL2ChainConfig }; } else if (networkName === 'staging-ignition') { config = { ...stagingIgnitionL2ChainConfig }; - } else if (networkName === 'ignition') { - config = { ...ignitionL2ChainConfig }; + } else if (networkName === 'mainnet') { + config = { ...mainnetL2ChainConfig }; } return config; } diff --git a/yarn-project/end-to-end/src/e2e_multi_validator/e2e_multi_validator_node.test.ts b/yarn-project/end-to-end/src/e2e_multi_validator/e2e_multi_validator_node.test.ts index c8a234ac855a..cd939f940eca 100644 --- a/yarn-project/end-to-end/src/e2e_multi_validator/e2e_multi_validator_node.test.ts +++ 
b/yarn-project/end-to-end/src/e2e_multi_validator/e2e_multi_validator_node.test.ts @@ -135,11 +135,11 @@ describe('e2e_multi_validator_node', () => { const payload = ConsensusPayload.fromBlock(block.block); const attestations = block.attestations .filter(a => !a.signature.isEmpty()) - .map(a => new BlockAttestation(block.block.number, payload, a.signature, Signature.empty())); + .map(a => new BlockAttestation(payload, a.signature, Signature.empty())); expect(attestations.length).toBeGreaterThanOrEqual((COMMITTEE_SIZE * 2) / 3 + 1); - const signers = attestations.map(att => att.getSender().toString()); + const signers = attestations.map(att => att.getSender()!.toString()); expect(signers.every(s => validatorAddresses.includes(s))).toBe(true); }); @@ -192,11 +192,11 @@ describe('e2e_multi_validator_node', () => { const payload = ConsensusPayload.fromBlock(block.block); const attestations = block.attestations .filter(a => !a.signature.isEmpty()) - .map(a => new BlockAttestation(block.block.number, payload, a.signature, Signature.empty())); + .map(a => new BlockAttestation(payload, a.signature, Signature.empty())); expect(attestations.length).toBeGreaterThanOrEqual((COMMITTEE_SIZE * 2) / 3 + 1); - const signers = attestations.map(att => att.getSender().toString()); + const signers = attestations.map(att => att.getSender()!.toString()); expect(signers).toEqual(expect.arrayContaining(validatorAddresses.slice(0, COMMITTEE_SIZE))); }); diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts index a37b9cc7ff85..aadbaff493cb 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts @@ -174,8 +174,8 @@ describe('e2e_p2p_network', () => { const payload = ConsensusPayload.fromBlock(block.block); const attestations = block.attestations .filter(a => !a.signature.isEmpty()) - .map(a => new BlockAttestation(blockNumber, payload, 
a.signature, Signature.empty())); - const signers = await Promise.all(attestations.map(att => att.getSender().toString())); + .map(a => new BlockAttestation(payload, a.signature, Signature.empty())); + const signers = await Promise.all(attestations.map(att => att.getSender()!.toString())); t.logger.info(`Attestation signers`, { signers }); // Check that the signers found are part of the proposer nodes to ensure the archiver fetched them right diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network_no_cheat.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network_no_cheat.test.ts index 6dc2f508624e..f9e2e136ba5f 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network_no_cheat.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network_no_cheat.test.ts @@ -226,8 +226,8 @@ describe('e2e_p2p_network', () => { const payload = ConsensusPayload.fromBlock(block.block); const attestations = block.attestations .filter(a => !a.signature.isEmpty()) - .map(a => new BlockAttestation(blockNumber, payload, a.signature, Signature.empty())); - const signers = await Promise.all(attestations.map(att => att.getSender().toString())); + .map(a => new BlockAttestation(payload, a.signature, Signature.empty())); + const signers = await Promise.all(attestations.map(att => att.getSender()!.toString())); t.logger.info(`Attestation signers`, { signers }); // Check that the signers found are part of the proposer nodes to ensure the archiver fetched them right diff --git a/yarn-project/end-to-end/src/e2e_p2p/preferred_gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/preferred_gossip_network.test.ts index 5070106bd104..1e7ffd2bbece 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/preferred_gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/preferred_gossip_network.test.ts @@ -358,8 +358,8 @@ describe('e2e_p2p_preferred_network', () => { const payload = ConsensusPayload.fromBlock(block.block); const attestations = block.attestations .filter(a => 
!a.signature.isEmpty()) - .map(a => new BlockAttestation(blockNumber, payload, a.signature, Signature.empty())); - const signers = await Promise.all(attestations.map(att => att.getSender().toString())); + .map(a => new BlockAttestation(payload, a.signature, Signature.empty())); + const signers = await Promise.all(attestations.map(att => att.getSender()!.toString())); t.logger.info(`Attestation signers`, { signers }); expect(signers.length).toEqual(validators.length); diff --git a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts index 7e6f5e712186..17c3dfa29796 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts @@ -1,11 +1,13 @@ import type { AztecNodeService } from '@aztec/aztec-node'; import { Fr, type SentTx, Tx, sleep } from '@aztec/aztec.js'; import { times } from '@aztec/foundation/collection'; +import { unfreeze } from '@aztec/foundation/types'; +import type { LibP2PService, P2PClient } from '@aztec/p2p'; import type { BlockBuilder } from '@aztec/sequencer-client'; import type { PublicTxResult, PublicTxSimulator } from '@aztec/simulator/server'; import { BlockProposal, SignatureDomainSeparator, getHashedSignaturePayload } from '@aztec/stdlib/p2p'; import { ReExFailedTxsError, ReExStateMismatchError, ReExTimeoutError } from '@aztec/stdlib/validators'; -import type { ValidatorClient } from '@aztec/validator-client'; +import type { ValidatorClient, ValidatorKeyStore } from '@aztec/validator-client'; import { describe, it, jest } from '@jest/globals'; import fs from 'fs'; @@ -121,25 +123,30 @@ describe('e2e_p2p_reex', () => { // Make sure the nodes submit faulty proposals, in this case a faulty proposal is one where we remove one of the transactions // Such that the calculated archive will be different! 
const interceptBroadcastProposal = (node: AztecNodeService) => { - jest.spyOn((node as any).p2pClient, 'broadcastProposal').mockImplementation(async (...args: unknown[]) => { + const p2pClient = (node as any).p2pClient as P2PClient; + jest.spyOn(p2pClient, 'broadcastProposal').mockImplementation(async (...args: unknown[]) => { // We remove one of the transactions, therefore the block root will be different! const proposal = args[0] as BlockProposal; + const proposerAddress = proposal.getSender(); const txHashes = proposal.txHashes; - // We need to mutate the proposal, so we cast to any - (proposal as any).txHashes = txHashes.slice(0, txHashes.length - 1); + // Mutate txhashes to remove the last one + unfreeze(proposal).txHashes = txHashes.slice(0, txHashes.length - 1); // We sign over the proposal using the node's signing key - // Abusing javascript to access the nodes signing key - const signer = (node as any).sequencer.sequencer.validatorClient.validationService.keyStore; + const signer = (node as any).sequencer.sequencer.validatorClient.validationService + .keyStore as ValidatorKeyStore; const newProposal = new BlockProposal( - proposal.blockNumber, proposal.payload, - await signer.signMessage(getHashedSignaturePayload(proposal.payload, SignatureDomainSeparator.blockProposal)), + await signer.signMessageWithAddress( + proposerAddress!, + getHashedSignaturePayload(proposal.payload, SignatureDomainSeparator.blockProposal), + ), proposal.txHashes, ); - return (node as any).p2pClient.p2pService.propagate(newProposal); + const p2pService = (p2pClient as any).p2pService as LibP2PService; + return p2pService.propagate(newProposal); }); }; diff --git a/yarn-project/end-to-end/src/e2e_p2p/upgrade_governance_proposer.test.ts b/yarn-project/end-to-end/src/e2e_p2p/upgrade_governance_proposer.test.ts index 5523f9974e8f..5b6525487352 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/upgrade_governance_proposer.test.ts +++ 
b/yarn-project/end-to-end/src/e2e_p2p/upgrade_governance_proposer.test.ts @@ -68,7 +68,7 @@ describe('e2e_p2p_governance_proposer', () => { } }); - it('Should cast votes to upgrade governanceProposer', async () => { + it('should cast votes to upgrade governanceProposer', async () => { // create the bootstrap node for the network if (!t.bootstrapNodeEnr) { throw new Error('Bootstrap node ENR is not available'); diff --git a/yarn-project/end-to-end/src/e2e_sequencer/gov_proposal.parallel.test.ts b/yarn-project/end-to-end/src/e2e_sequencer/gov_proposal.parallel.test.ts new file mode 100644 index 000000000000..61dcf5665014 --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_sequencer/gov_proposal.parallel.test.ts @@ -0,0 +1,219 @@ +import { AztecAddress, EthAddress, Fr, type Logger, type Wallet, retryUntil, sleep } from '@aztec/aztec.js'; +import { CheatCodes } from '@aztec/aztec/testing'; +import type { BlobSinkServer } from '@aztec/blob-sink/server'; +import { + type DeployL1ContractsReturnType, + GovernanceProposerContract, + RollupContract, + deployL1Contract, +} from '@aztec/ethereum'; +import { ChainMonitor } from '@aztec/ethereum/test'; +import { times } from '@aztec/foundation/collection'; +import { SecretValue } from '@aztec/foundation/config'; +import { TimeoutError } from '@aztec/foundation/error'; +import { bufferToHex } from '@aztec/foundation/string'; +import type { TestDateProvider } from '@aztec/foundation/timer'; +import { NewGovernanceProposerPayloadAbi } from '@aztec/l1-artifacts/NewGovernanceProposerPayloadAbi'; +import { NewGovernanceProposerPayloadBytecode } from '@aztec/l1-artifacts/NewGovernanceProposerPayloadBytecode'; +import { TestContract } from '@aztec/noir-test-contracts.js/Test'; +import type { AztecNode, AztecNodeAdmin } from '@aztec/stdlib/interfaces/client'; + +import { jest } from '@jest/globals'; +import { privateKeyToAccount } from 'viem/accounts'; + +import { getPrivateKeyFromIndex, setup } from '../fixtures/utils.js'; + +const 
ETHEREUM_SLOT_DURATION = 8; +const AZTEC_SLOT_DURATION = 16; +const TXS_PER_BLOCK = 1; +const ROUND_SIZE = 2; +const QUORUM_SIZE = 2; +// Can't use 48 without chunking the addValidators call. +const COMMITTEE_SIZE = 16; + +jest.setTimeout(1000 * 60 * 5); + +describe('e2e_gov_proposal', () => { + let logger: Logger; + let teardown: () => Promise; + let wallet: Wallet; + let defaultAccountAddress: AztecAddress; + let aztecNode: AztecNode | undefined; + let aztecNodeAdmin: AztecNodeAdmin | undefined; + let deployL1ContractsValues: DeployL1ContractsReturnType; + let cheatCodes: CheatCodes; + let blobSink: BlobSinkServer | undefined; + let dateProvider: TestDateProvider | undefined; + let rollup: RollupContract; + let governanceProposer: GovernanceProposerContract; + let newGovernanceProposerAddress: EthAddress; + let testContract: TestContract; + + beforeEach(async () => { + const validatorOffset = 10; + const validators = times(COMMITTEE_SIZE, i => { + const privateKey = bufferToHex(getPrivateKeyFromIndex(i + validatorOffset)!); + const account = privateKeyToAccount(privateKey); + const address = EthAddress.fromString(account.address); + return { attester: address, withdrawer: address, privateKey }; + }); + + let accounts: AztecAddress[] = []; + const context = await setup(1, { + anvilAccounts: 100, + aztecTargetCommitteeSize: COMMITTEE_SIZE, + initialValidators: validators.map(v => ({ ...v, bn254SecretKey: new SecretValue(Fr.random().toBigInt()) })), + validatorPrivateKeys: new SecretValue(validators.map(v => v.privateKey)), // sequencer runs with all validator keys + governanceProposerRoundSize: ROUND_SIZE, + governanceProposerQuorum: QUORUM_SIZE, + ethereumSlotDuration: ETHEREUM_SLOT_DURATION, + aztecSlotDuration: AZTEC_SLOT_DURATION, + aztecProofSubmissionEpochs: 128, // no pruning + salt: 420, + minTxsPerBlock: TXS_PER_BLOCK, + enforceTimeTable: true, + automineL1Setup: true, // speed up setup + }); + + ({ + teardown, + logger, + wallet, + aztecNode, + 
aztecNodeAdmin, + deployL1ContractsValues, + cheatCodes, + dateProvider, + accounts, + blobSink, + } = context); + defaultAccountAddress = accounts[0]; + + // Get contract wrappers + const { l1Client, l1ContractAddresses } = deployL1ContractsValues; + const { registryAddress, gseAddress, governanceProposerAddress } = l1ContractAddresses; + rollup = RollupContract.getFromL1ContractsValues(deployL1ContractsValues); + governanceProposer = new GovernanceProposerContract(l1Client, governanceProposerAddress.toString()); + + // Deploy new governance proposer payload + const deployment = await deployL1Contract( + l1Client, + NewGovernanceProposerPayloadAbi, + NewGovernanceProposerPayloadBytecode, + [registryAddress.toString(), gseAddress!.toString()], + { salt: '0x2a' }, + ); + newGovernanceProposerAddress = deployment.address; + logger.warn(`Deployed new governance proposer at ${newGovernanceProposerAddress}`); + + // Deploy a test contract to send msgs via the outbox, since this increases + // gas cost of a proposal, which has triggered oog errors in the past. 
+ testContract = await TestContract.deploy(wallet).send({ from: defaultAccountAddress }).deployed(); + logger.warn(`Deployed test contract at ${testContract.address}`); + }); + + afterEach(() => teardown()); + + /** Sets up voting for the next round by warping to the beginning of the round */ + const setupVotingRound = async () => { + const roundDuration = await governanceProposer.getRoundSize(); + expect(roundDuration).toEqual(BigInt(ROUND_SIZE)); + + const slot = await rollup.getSlotNumber(); + const round = await governanceProposer.computeRound(slot); + const nextRoundBeginsAtSlot = (slot / roundDuration) * roundDuration + roundDuration; + const nextRoundBeginsAtTimestamp = await rollup.getTimestampForSlot(nextRoundBeginsAtSlot); + + logger.warn(`Warping to round ${round + 1n} at slot ${nextRoundBeginsAtSlot}`, { + nextRoundBeginsAtSlot, + nextRoundBeginsAtTimestamp, + roundDuration, + slot, + round, + }); + + // We warp to one L1 slot before the start of the slot, since that's when we start building the L2 block + await cheatCodes.eth.warp(Number(nextRoundBeginsAtTimestamp) - ETHEREUM_SLOT_DURATION, { + resetBlockInterval: true, + updateDateProvider: dateProvider, + }); + + return { round, roundDuration, nextRoundBeginsAtSlot }; + }; + + /** Verifies that the expected number of votes were cast for the governance proposal */ + const verifyVotes = async (round: bigint, expectedMinVotes: bigint) => { + const signals = await governanceProposer.getPayloadSignals( + rollup.address, + round + 1n, + newGovernanceProposerAddress.toString(), + ); + expect(signals).toBeGreaterThanOrEqual(expectedMinVotes); + }; + + it('should propose blocks while voting', async () => { + await aztecNodeAdmin!.setConfig({ + governanceProposerPayload: newGovernanceProposerAddress, + maxTxsPerBlock: TXS_PER_BLOCK, + }); + + const { round, roundDuration } = await setupVotingRound(); + + // Now we submit a bunch of transactions to the PXE. 
+ // We know that this will last at least as long as the round duration, + // since we wait for the txs to be mined, and do so `roundDuration` times. + // Simultaneously, we should be voting for the proposal in every slot. + for (let i = 0; i < roundDuration; i++) { + const txs = times(TXS_PER_BLOCK, () => + testContract.methods + .create_l2_to_l1_message_arbitrary_recipient_private(Fr.random(), EthAddress.random()) + .send({ from: defaultAccountAddress }), + ); + await Promise.all( + txs.map(async (tx, j) => { + logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); + return tx.wait({ timeout: 2 * AZTEC_SLOT_DURATION + 2 }); + }), + ); + } + + logger.warn(`All transactions submitted and mined`); + await verifyVotes(round, roundDuration); + }); + + it('should vote even when unable to build blocks', async () => { + const monitor = new ChainMonitor(rollup, dateProvider).start(); + + // Break the blob sink so no new blocks are synced + blobSink!.setDisableBlobStorage(true); + await sleep(1000); + const lastBlockSynced = await aztecNode!.getBlockNumber(); + logger.warn(`Blob sink is disabled (last block synced is ${lastBlockSynced})`); + + // And send a tx which shouldnt be syncable but does move the block forward + await expect(() => + testContract.methods + .create_l2_to_l1_message_arbitrary_recipient_private(Fr.random(), EthAddress.random()) + .send({ from: defaultAccountAddress }) + .wait({ timeout: AZTEC_SLOT_DURATION + 2 }), + ).rejects.toThrow(TimeoutError); + logger.warn(`Test tx timed out as expected`); + + // Check that the block number has indeed increased on L1 so sequencers cant pass the sync check + expect(await monitor.run().then(b => b.l2BlockNumber)).toBeGreaterThan(lastBlockSynced); + logger.warn(`L2 block number has increased on L1`); + + // Start voting! 
+ await aztecNodeAdmin!.setConfig({ governanceProposerPayload: newGovernanceProposerAddress }); + const { round, roundDuration, nextRoundBeginsAtSlot } = await setupVotingRound(); + + // And wait until the round is over + const nextRoundEndsAtSlot = nextRoundBeginsAtSlot + roundDuration; + const timeout = AZTEC_SLOT_DURATION * Number(roundDuration + 1n) + 20; + logger.warn(`Waiting until slot ${nextRoundEndsAtSlot} for round to end (timeout ${timeout}s)`); + await retryUntil(() => rollup.getSlotNumber().then(s => s > nextRoundEndsAtSlot), 'round end', timeout, 1); + + // We should have voted despite being unable to build blocks + await verifyVotes(round, roundDuration); + }); +}); diff --git a/yarn-project/end-to-end/src/e2e_sequencer/gov_proposal.test.ts b/yarn-project/end-to-end/src/e2e_sequencer/gov_proposal.test.ts deleted file mode 100644 index fe7a230fed73..000000000000 --- a/yarn-project/end-to-end/src/e2e_sequencer/gov_proposal.test.ts +++ /dev/null @@ -1,144 +0,0 @@ -import { AztecAddress, EthAddress, Fr, type Logger, type Wallet } from '@aztec/aztec.js'; -import { CheatCodes } from '@aztec/aztec/testing'; -import { - type DeployL1ContractsReturnType, - GovernanceProposerContract, - RollupContract, - deployL1Contract, -} from '@aztec/ethereum'; -import { times } from '@aztec/foundation/collection'; -import { SecretValue } from '@aztec/foundation/config'; -import { bufferToHex } from '@aztec/foundation/string'; -import type { TestDateProvider } from '@aztec/foundation/timer'; -import { NewGovernanceProposerPayloadAbi } from '@aztec/l1-artifacts/NewGovernanceProposerPayloadAbi'; -import { NewGovernanceProposerPayloadBytecode } from '@aztec/l1-artifacts/NewGovernanceProposerPayloadBytecode'; -import { TestContract } from '@aztec/noir-test-contracts.js/Test'; -import type { AztecNodeAdmin } from '@aztec/stdlib/interfaces/client'; - -import { privateKeyToAccount } from 'viem/accounts'; - -import { getPrivateKeyFromIndex, setup } from '../fixtures/utils.js'; - 
-const ETHEREUM_SLOT_DURATION = 8; -const AZTEC_SLOT_DURATION = 16; -const TXS_PER_BLOCK = 1; -const ROUND_SIZE = 2; -const QUORUM_SIZE = 2; -// Can't use 48 without chunking the addValidators call. -const COMMITTEE_SIZE = 16; - -describe('e2e_gov_proposal', () => { - let logger: Logger; - let teardown: () => Promise; - let wallet: Wallet; - let defaultAccountAddress: AztecAddress; - let aztecNodeAdmin: AztecNodeAdmin | undefined; - let deployL1ContractsValues: DeployL1ContractsReturnType; - let cheatCodes: CheatCodes; - let dateProvider: TestDateProvider | undefined; - - beforeEach(async () => { - const validatorOffset = 10; - const validators = times(COMMITTEE_SIZE, i => { - const privateKey = bufferToHex(getPrivateKeyFromIndex(i + validatorOffset)!); - const account = privateKeyToAccount(privateKey); - const address = EthAddress.fromString(account.address); - return { attester: address, withdrawer: address, privateKey }; - }); - let accounts: AztecAddress[] = []; - ({ teardown, logger, wallet, aztecNodeAdmin, deployL1ContractsValues, cheatCodes, dateProvider, accounts } = - await setup(1, { - anvilAccounts: 100, - aztecTargetCommitteeSize: COMMITTEE_SIZE, - initialValidators: validators.map(v => ({ ...v, bn254SecretKey: new SecretValue(Fr.random().toBigInt()) })), - validatorPrivateKeys: new SecretValue(validators.map(v => v.privateKey)), // sequencer runs with all validator keys - governanceProposerRoundSize: ROUND_SIZE, - governanceProposerQuorum: QUORUM_SIZE, - ethereumSlotDuration: ETHEREUM_SLOT_DURATION, - aztecSlotDuration: AZTEC_SLOT_DURATION, - aztecProofSubmissionEpochs: 128, // no pruning - salt: 420, - minTxsPerBlock: TXS_PER_BLOCK, - enforceTimeTable: true, - automineL1Setup: true, // speed up setup - })); - defaultAccountAddress = accounts[0]; - }, 3 * 60000); - - afterEach(() => teardown()); - - it( - 'should build/propose blocks while voting', - async () => { - const { l1Client, l1ContractAddresses } = deployL1ContractsValues; - const { 
registryAddress, rollupAddress, gseAddress, governanceProposerAddress } = l1ContractAddresses; - const rollup = new RollupContract(l1Client, rollupAddress.toString()); - const governanceProposer = new GovernanceProposerContract(l1Client, governanceProposerAddress.toString()); - - const { address: newGovernanceProposerAddress } = await deployL1Contract( - l1Client, - NewGovernanceProposerPayloadAbi, - NewGovernanceProposerPayloadBytecode, - [registryAddress.toString(), gseAddress!.toString()], - { salt: '0x2a' }, - ); - - // Deploy a test contract to send msgs via the outbox, since this increases - // gas cost of a proposal, which has triggered oog errors in the past. - const testContract = await TestContract.deploy(wallet).send({ from: defaultAccountAddress }).deployed(); - - await aztecNodeAdmin!.setConfig({ - governanceProposerPayload: newGovernanceProposerAddress, - maxTxsPerBlock: TXS_PER_BLOCK, - }); - - const roundDuration = await governanceProposer.getRoundSize(); - expect(roundDuration).toEqual(BigInt(ROUND_SIZE)); - const slot = await rollup.getSlotNumber(); - const round = await governanceProposer.computeRound(slot); - const nextRoundBeginsAtSlot = (slot / roundDuration) * roundDuration + roundDuration; - const nextRoundBeginsAtTimestamp = await rollup.getTimestampForSlot(nextRoundBeginsAtSlot); - - logger.warn(`Warping to round ${round + 1n} at slot ${nextRoundBeginsAtSlot}`, { - nextRoundBeginsAtSlot, - nextRoundBeginsAtTimestamp, - roundDuration, - slot, - round, - }); - - await cheatCodes.eth.warp(Number(nextRoundBeginsAtTimestamp), { - resetBlockInterval: true, - updateDateProvider: dateProvider, - }); - - // Now we submit a bunch of transactions to the PXE. - // We know that this will last at least as long as the round duration, - // since we wait for the txs to be mined, and do so `roundDuration` times. - // Simultaneously, we should be voting for the proposal in every slot. 
- - for (let i = 0; i < roundDuration; i++) { - const txs = times(TXS_PER_BLOCK, () => - testContract.methods - .create_l2_to_l1_message_arbitrary_recipient_private(Fr.random(), EthAddress.random()) - .send({ from: defaultAccountAddress }), - ); - await Promise.all( - txs.map(async (tx, j) => { - logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); - return tx.wait({ timeout: 2 * AZTEC_SLOT_DURATION + 2 }); - }), - ); - } - - logger.warn('All transactions submitted and mined'); - const signals = await governanceProposer.getPayloadSignals( - rollupAddress.toString(), - round + 1n, - newGovernanceProposerAddress.toString(), - ); - expect(signals).toBeGreaterThan(0n); - }, - 1000 * 60 * 5, - ); -}); diff --git a/yarn-project/epoch-cache/src/epoch_cache.ts b/yarn-project/epoch-cache/src/epoch_cache.ts index 635052c3df52..9c5e52f20f02 100644 --- a/yarn-project/epoch-cache/src/epoch_cache.ts +++ b/yarn-project/epoch-cache/src/epoch_cache.ts @@ -244,7 +244,7 @@ export class EpochCache implements EpochCacheInterface { } /** - * Get the proposer attester address in the gien slot + * Get the proposer attester address in the given L2 slot * @returns The proposer attester address. If the committee does not exist, we throw a NoCommitteeError. * If the committee is empty (i.e. target committee size is 0, and anyone can propose), we return undefined. 
*/ diff --git a/yarn-project/ethereum/src/deploy_l1_contracts.ts b/yarn-project/ethereum/src/deploy_l1_contracts.ts index 31bf73ba5e99..03a30bcfc98d 100644 --- a/yarn-project/ethereum/src/deploy_l1_contracts.ts +++ b/yarn-project/ethereum/src/deploy_l1_contracts.ts @@ -279,6 +279,8 @@ export const deploySharedContracts = async ( const deployedStaking = await deployer.deploy(StakingAssetArtifact, ['Staking', 'STK', l1Client.account.address]); stakingAssetAddress = deployedStaking.address; logger.verbose(`Deployed Staking Asset at ${stakingAssetAddress}`); + + await deployer.waitForDeployments(); } const gseAddress = ( @@ -352,7 +354,7 @@ export const deploySharedContracts = async ( const coinIssuerAddress = ( await deployer.deploy(CoinIssuerArtifact, [ feeAssetAddress.toString(), - (25_000_000_000n * 10n ** 18n) / (60n * 60n * 24n * 365n), + 2n * 10n ** 17n, // hard cap of 20% per year l1Client.account.address, ]) ).address; diff --git a/yarn-project/ethereum/src/zkPassportVerifierAddress.ts b/yarn-project/ethereum/src/zkPassportVerifierAddress.ts index 56e059f7c708..42605c57b38e 100644 --- a/yarn-project/ethereum/src/zkPassportVerifierAddress.ts +++ b/yarn-project/ethereum/src/zkPassportVerifierAddress.ts @@ -4,7 +4,7 @@ import { EthAddress } from '@aztec/foundation/eth-address'; * The address of the zk passport verifier on sepolia * get address from: ROOT/l1-contracts/lib/circuits/src/solidity/deployments/deployment-11155111.json */ -export const ZK_PASSPORT_VERIFIER_ADDRESS = EthAddress.fromString('0xBec82dec0747C9170D760D5aba9cc44929B17C05'); +export const ZK_PASSPORT_VERIFIER_ADDRESS = EthAddress.fromString('0x3101Bad9eA5fACadA5554844a1a88F7Fe48D4DE0'); /** * The default domain of the zk passport site */ diff --git a/yarn-project/foundation/src/config/network_name.ts b/yarn-project/foundation/src/config/network_name.ts index 241b612a9da2..fc81ded7f57c 100644 --- a/yarn-project/foundation/src/config/network_name.ts +++ 
b/yarn-project/foundation/src/config/network_name.ts @@ -1,4 +1,4 @@ -export type NetworkNames = 'local' | 'staging-ignition' | 'staging-public' | 'testnet' | 'ignition'; +export type NetworkNames = 'local' | 'staging-ignition' | 'staging-public' | 'testnet' | 'mainnet'; export function getActiveNetworkName(name?: string): NetworkNames { const network = name || process.env.NETWORK; @@ -10,8 +10,8 @@ export function getActiveNetworkName(name?: string): NetworkNames { return network; } else if (network === 'testnet' || network === 'alpha-testnet') { return 'testnet'; - } else if (network === 'ignition') { - return 'ignition'; + } else if (network === 'mainnet') { + return 'mainnet'; } throw new Error(`Unknown network: ${network}`); } diff --git a/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts b/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts index dad5db8a74ca..86903a4e5b18 100644 --- a/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts +++ b/yarn-project/foundation/src/crypto/secp256k1-signer/utils.ts @@ -47,6 +47,7 @@ export function addressFromPrivateKey(privateKey: Buffer): EthAddress { * @param hash - The hash to recover the address from. * @param signature - The signature to recover the address from. * @returns The address. + * @throws Error if signature recovery fails. */ export function recoverAddress(hash: Buffer32, signature: Signature): EthAddress { try { @@ -59,6 +60,21 @@ export function recoverAddress(hash: Buffer32, signature: Signature): EthAddress } } +/** + * Safely attempts to recover an address from a hash and a signature. + * @param hash - The hash to recover the address from. + * @param signature - The signature to recover the address from. + * @returns The address if recovery succeeds, undefined otherwise. 
+ */ +export function tryRecoverAddress(hash: Buffer32, signature: Signature): EthAddress | undefined { + try { + const publicKey = recoverPublicKey(hash, signature); + return publicKeyToAddress(publicKey); + } catch { + return undefined; + } +} + /** * @attribution - viem * Converts a yParityOrV value to a recovery bit. diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index cf32d0ba19a1..10f7011205c1 100644 --- a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -123,7 +123,13 @@ export class P2PClient const constants = this.txCollection.getConstants(); const nextSlotTimestampSeconds = Number(getTimestampForSlot(block.slotNumber.toBigInt() + 1n, constants)); const deadline = new Date(nextSlotTimestampSeconds * 1000); - await this.txProvider.getTxsForBlockProposal(block, { pinnedPeer: sender, deadline }); + const parentBlock = await this.l2BlockSource.getBlockHeaderByArchive(block.payload.header.lastArchiveRoot); + if (!parentBlock) { + this.log.debug(`Cannot collect txs for proposal as parent block not found`); + return; + } + const blockNumber = parentBlock.getBlockNumber() + 1; + await this.txProvider.getTxsForBlockProposal(block, blockNumber, { pinnedPeer: sender, deadline }); return undefined; }); @@ -365,7 +371,6 @@ export class P2PClient } @trackSpan('p2pClient.broadcastProposal', async proposal => ({ - [Attributes.BLOCK_NUMBER]: proposal.blockNumber, [Attributes.SLOT_NUMBER]: proposal.slotNumber.toNumber(), [Attributes.BLOCK_ARCHIVE]: proposal.archive.toString(), [Attributes.P2P_ID]: (await proposal.p2pMessageIdentifier()).toString(), diff --git a/yarn-project/p2p/src/mem_pools/attestation_pool/attestation_pool_test_suite.ts b/yarn-project/p2p/src/mem_pools/attestation_pool/attestation_pool_test_suite.ts index bafded3e7f9c..171bcc162749 100644 --- a/yarn-project/p2p/src/mem_pools/attestation_pool/attestation_pool_test_suite.ts +++ 
b/yarn-project/p2p/src/mem_pools/attestation_pool/attestation_pool_test_suite.ts @@ -41,7 +41,6 @@ export function describeAttestationPool(getAttestationPool: () => AttestationPoo }; const mockBlockProposal = (signer: Secp256k1Signer, slotNumber: number, archive: Fr = Fr.random()): BlockProposal => { - const blockNumber = 1; const header = makeHeader(1, 2, slotNumber); const payload = new ConsensusPayload(header.toPropose(), archive, header.state); @@ -50,7 +49,7 @@ export function describeAttestationPool(getAttestationPool: () => AttestationPoo const txHashes = [TxHash.random(), TxHash.random()]; // Mock tx hashes - return new BlockProposalClass(blockNumber, payload, signature, txHashes); + return new BlockProposalClass(payload, signature, txHashes); }; // We compare buffers as the objects can have cached values attached to them which are not serialised @@ -117,7 +116,7 @@ export function describeAttestationPool(getAttestationPool: () => AttestationPoo const retreivedAttestations = await ap.getAttestationsForSlotAndProposal(BigInt(slotNumber), archive.toString()); expect(retreivedAttestations.length).toBe(1); expect(retreivedAttestations[0].toBuffer()).toEqual(attestations[0].toBuffer()); - expect(retreivedAttestations[0].getSender().toString()).toEqual(signer.address.toString()); + expect(retreivedAttestations[0].getSender()?.toString()).toEqual(signer.address.toString()); // Try adding them on another operation and check they are still not duplicated await ap.addAttestations([attestations[0]]); @@ -291,7 +290,7 @@ export function describeAttestationPool(getAttestationPool: () => AttestationPoo expect(retrievedProposal).toBeDefined(); // Should have the second proposal expect(retrievedProposal!.toBuffer()).toEqual(proposal2.toBuffer()); - expect(retrievedProposal!.getSender().toString()).toBe(signers[1].address.toString()); + expect(retrievedProposal!.getSender()?.toString()).toBe(signers[1].address.toString()); }); it('should handle block proposals with 
different slots and same archive', async () => { diff --git a/yarn-project/p2p/src/mem_pools/attestation_pool/kv_attestation_pool.ts b/yarn-project/p2p/src/mem_pools/attestation_pool/kv_attestation_pool.ts index 822ab0772471..7b9c209d29b2 100644 --- a/yarn-project/p2p/src/mem_pools/attestation_pool/kv_attestation_pool.ts +++ b/yarn-project/p2p/src/mem_pools/attestation_pool/kv_attestation_pool.ts @@ -66,7 +66,19 @@ export class KvAttestationPool implements AttestationPool { for (const attestation of attestations) { const slotNumber = attestation.payload.header.slotNumber; const proposalId = attestation.archive; - const address = attestation.getSender().toString(); + const sender = attestation.getSender(); + + // Skip attestations with invalid signatures + if (!sender) { + this.log.warn(`Skipping attestation with invalid signature for slot ${slotNumber.toBigInt()}`, { + signature: attestation.signature.toString(), + slotNumber, + proposalId, + }); + continue; + } + + const address = sender.toString(); await this.attestations.set(this.getAttestationKey(slotNumber, proposalId, address), attestation.toBuffer()); @@ -176,7 +188,15 @@ export class KvAttestationPool implements AttestationPool { for (const attestation of attestations) { const slotNumber = attestation.payload.header.slotNumber; const proposalId = attestation.archive; - const address = attestation.getSender().toString(); + const sender = attestation.getSender(); + + // Skip attestations with invalid signatures + if (!sender) { + this.log.warn(`Skipping deletion of attestation with invalid signature for slot ${slotNumber.toBigInt()}`); + continue; + } + + const address = sender.toString(); const key = this.getAttestationKey(slotNumber, proposalId, address); if (await this.attestations.hasAsync(key)) { diff --git a/yarn-project/p2p/src/mem_pools/attestation_pool/memory_attestation_pool.ts b/yarn-project/p2p/src/mem_pools/attestation_pool/memory_attestation_pool.ts index 94eb9cbc87e2..fdcae61f6034 100644 --- 
a/yarn-project/p2p/src/mem_pools/attestation_pool/memory_attestation_pool.ts +++ b/yarn-project/p2p/src/mem_pools/attestation_pool/memory_attestation_pool.ts @@ -55,16 +55,26 @@ export class InMemoryAttestationPool implements AttestationPool { const slotNumber = attestation.payload.header.slotNumber; const proposalId = attestation.archive.toString(); - const address = attestation.getSender(); + const sender = attestation.getSender(); + + // Skip attestations with invalid signatures + if (!sender) { + this.log.warn(`Skipping attestation with invalid signature for slot ${slotNumber.toBigInt()}`, { + signature: attestation.signature.toString(), + slotNumber, + proposalId, + }); + continue; + } const slotAttestationMap = getSlotOrDefault(this.attestations, slotNumber.toBigInt()); const proposalAttestationMap = getProposalOrDefault(slotAttestationMap, proposalId); - proposalAttestationMap.set(address.toString(), attestation); + proposalAttestationMap.set(sender.toString(), attestation); - this.log.verbose(`Added attestation for slot ${slotNumber.toBigInt()} from ${address}`, { + this.log.verbose(`Added attestation for slot ${slotNumber.toBigInt()} from ${sender}`, { signature: attestation.signature.toString(), slotNumber, - address, + address: sender, proposalId, }); } @@ -147,9 +157,16 @@ export class InMemoryAttestationPool implements AttestationPool { const proposalId = attestation.archive.toString(); const proposalAttestationMap = getProposalOrDefault(slotAttestationMap, proposalId); if (proposalAttestationMap) { - const address = attestation.getSender(); - proposalAttestationMap.delete(address.toString()); - this.log.debug(`Deleted attestation for slot ${slotNumber} from ${address}`); + const sender = attestation.getSender(); + + // Skip attestations with invalid signatures + if (!sender) { + this.log.warn(`Skipping deletion of attestation with invalid signature for slot ${slotNumber.toBigInt()}`); + continue; + } + + 
proposalAttestationMap.delete(sender.toString()); + this.log.debug(`Deleted attestation for slot ${slotNumber} from ${sender}`); } } } diff --git a/yarn-project/p2p/src/mem_pools/attestation_pool/mocks.ts b/yarn-project/p2p/src/mem_pools/attestation_pool/mocks.ts index e343a370bcac..855cc6ec4f8a 100644 --- a/yarn-project/p2p/src/mem_pools/attestation_pool/mocks.ts +++ b/yarn-project/p2p/src/mem_pools/attestation_pool/mocks.ts @@ -41,5 +41,5 @@ export const mockAttestation = ( const proposalHash = getHashedSignaturePayloadEthSignedMessage(payload, SignatureDomainSeparator.blockProposal); const proposerSignature = signer.sign(proposalHash); - return new BlockAttestation(header.globalVariables.blockNumber, payload, attestationSignature, proposerSignature); + return new BlockAttestation(payload, attestationSignature, proposerSignature); }; diff --git a/yarn-project/p2p/src/msg_validators/attestation_validator/attestation_validator.ts b/yarn-project/p2p/src/msg_validators/attestation_validator/attestation_validator.ts index cda994f5d837..f9582c1e745b 100644 --- a/yarn-project/p2p/src/msg_validators/attestation_validator/attestation_validator.ts +++ b/yarn-project/p2p/src/msg_validators/attestation_validator/attestation_validator.ts @@ -26,8 +26,14 @@ export class AttestationValidator implements P2PValidator { return PeerErrorSeverity.HighToleranceError; } - // Verify the attester is in the committee for this slot + // Verify the signature is valid const attester = message.getSender(); + if (attester === undefined) { + this.logger.warn(`Invalid signature in attestation for slot ${slotNumberBigInt}`); + return PeerErrorSeverity.LowToleranceError; + } + + // Verify the attester is in the committee for this slot if (!(await this.epochCache.isInCommittee(slotNumberBigInt, attester))) { this.logger.warn(`Attester ${attester.toString()} is not in committee for slot ${slotNumberBigInt}`); return PeerErrorSeverity.HighToleranceError; @@ -40,6 +46,10 @@ export class 
AttestationValidator implements P2PValidator { this.logger.warn(`No proposer defined for slot ${slotNumberBigInt}`); return PeerErrorSeverity.HighToleranceError; } + if (!proposer) { + this.logger.warn(`Invalid proposer signature in attestation for slot ${slotNumberBigInt}`); + return PeerErrorSeverity.LowToleranceError; + } if (!proposer.equals(expectedProposer)) { this.logger.warn( `Proposer signature mismatch in attestation. ` + diff --git a/yarn-project/p2p/src/msg_validators/block_proposal_validator/block_proposal_validator.test.ts b/yarn-project/p2p/src/msg_validators/block_proposal_validator/block_proposal_validator.test.ts index 2bf35473f5ed..6aa16e16ec4f 100644 --- a/yarn-project/p2p/src/msg_validators/block_proposal_validator/block_proposal_validator.test.ts +++ b/yarn-project/p2p/src/msg_validators/block_proposal_validator/block_proposal_validator.test.ts @@ -3,6 +3,7 @@ import { Secp256k1Signer } from '@aztec/foundation/crypto'; import { Fr } from '@aztec/foundation/fields'; import { PeerErrorSeverity } from '@aztec/stdlib/p2p'; import { makeBlockProposal, makeHeader } from '@aztec/stdlib/testing'; +import { TxHash } from '@aztec/stdlib/tx'; import { mock } from 'jest-mock-extended'; @@ -14,7 +15,7 @@ describe('BlockProposalValidator', () => { beforeEach(() => { epochCache = mock(); - validator = new BlockProposalValidator(epochCache); + validator = new BlockProposalValidator(epochCache, { txsPermitted: true }); }); it('returns high tolerance error if slot number is not current or next slot', async () => { @@ -146,4 +147,75 @@ describe('BlockProposalValidator', () => { const result = await validator.validate(mockProposal); expect(result).toBeUndefined(); }); + + describe('transaction permission validation', () => { + it('returns mid tolerance error if txs not permitted and proposal contains txHashes', async () => { + const currentProposer = Secp256k1Signer.random(); + const validatorWithTxsDisabled = new BlockProposalValidator(epochCache, { 
txsPermitted: false }); + + // Create a block proposal with transaction hashes + const mockProposal = makeBlockProposal({ + header: makeHeader(1, 100, 100), + signer: currentProposer, + txHashes: [TxHash.random(), TxHash.random()], // Include some tx hashes + }); + + // Mock epoch cache to return valid proposer (so only tx permission check fails) + (epochCache.getProposerAttesterAddressInCurrentOrNextSlot as jest.Mock).mockResolvedValue({ + currentSlot: 100n, + nextSlot: 101n, + currentProposer: currentProposer.address, + nextProposer: Fr.random(), + }); + + const result = await validatorWithTxsDisabled.validate(mockProposal); + expect(result).toBe(PeerErrorSeverity.MidToleranceError); + }); + + it('returns undefined if txs not permitted but proposal has no txHashes', async () => { + const currentProposer = Secp256k1Signer.random(); + const validatorWithTxsDisabled = new BlockProposalValidator(epochCache, { txsPermitted: false }); + + // Create a block proposal without transaction hashes + const mockProposal = makeBlockProposal({ + header: makeHeader(1, 100, 100), + signer: currentProposer, + txHashes: [], // Empty tx hashes array + }); + + // Mock epoch cache for valid case + (epochCache.getProposerAttesterAddressInCurrentOrNextSlot as jest.Mock).mockResolvedValue({ + currentSlot: 100n, + nextSlot: 101n, + currentProposer: currentProposer.address, + nextProposer: Fr.random(), + }); + + const result = await validatorWithTxsDisabled.validate(mockProposal); + expect(result).toBeUndefined(); + }); + + it('returns undefined if txs permitted and proposal contains txHashes', async () => { + const currentProposer = Secp256k1Signer.random(); + // validator already created with txsPermitted = true in beforeEach + + // Create a block proposal with transaction hashes + const mockProposal = makeBlockProposal({ + header: makeHeader(1, 100, 100), + signer: currentProposer, + txHashes: [TxHash.random(), TxHash.random()], // Include some tx hashes + }); + + // Mock epoch cache for 
valid case + (epochCache.getProposerAttesterAddressInCurrentOrNextSlot as jest.Mock).mockResolvedValue({ + currentSlot: 100n, + nextSlot: 101n, + currentProposer: currentProposer.address, + nextProposer: Fr.random(), + }); + + const result = await validator.validate(mockProposal); + expect(result).toBeUndefined(); + }); + }); }); diff --git a/yarn-project/p2p/src/msg_validators/block_proposal_validator/block_proposal_validator.ts b/yarn-project/p2p/src/msg_validators/block_proposal_validator/block_proposal_validator.ts index 6008837be381..55311923e919 100644 --- a/yarn-project/p2p/src/msg_validators/block_proposal_validator/block_proposal_validator.ts +++ b/yarn-project/p2p/src/msg_validators/block_proposal_validator/block_proposal_validator.ts @@ -6,14 +6,31 @@ import { type BlockProposal, type P2PValidator, PeerErrorSeverity } from '@aztec export class BlockProposalValidator implements P2PValidator { private epochCache: EpochCacheInterface; private logger: Logger; + private txsPermitted: boolean; - constructor(epochCache: EpochCacheInterface) { + constructor(epochCache: EpochCacheInterface, opts: { txsPermitted: boolean }) { this.epochCache = epochCache; + this.txsPermitted = opts.txsPermitted; this.logger = createLogger('p2p:block_proposal_validator'); } async validate(block: BlockProposal): Promise { try { + // Check signature validity first - invalid signatures are a high-severity issue + const proposer = block.getSender(); + if (!proposer) { + this.logger.debug(`Penalizing peer for block proposal with invalid signature`); + return PeerErrorSeverity.MidToleranceError; + } + + // Check if transactions are permitted when the proposal contains transaction hashes + if (!this.txsPermitted && block.txHashes.length > 0) { + this.logger.debug( + `Penalizing peer for block proposal with ${block.txHashes.length} transaction(s) when transactions are not permitted`, + ); + return PeerErrorSeverity.MidToleranceError; + } + const { currentProposer, nextProposer, 
currentSlot, nextSlot } = await this.epochCache.getProposerAttesterAddressInCurrentOrNextSlot(); @@ -25,12 +42,11 @@ export class BlockProposalValidator implements P2PValidator { } // Check that the block proposal is from the current or next proposer - const proposer = block.getSender(); if (slotNumberBigInt === currentSlot && currentProposer !== undefined && !proposer.equals(currentProposer)) { this.logger.debug(`Penalizing peer for invalid proposer for current slot ${slotNumberBigInt}`, { currentProposer, nextProposer, - proposer, + proposer: proposer.toString(), }); return PeerErrorSeverity.MidToleranceError; } @@ -39,7 +55,7 @@ export class BlockProposalValidator implements P2PValidator { this.logger.debug(`Penalizing peer for invalid proposer for next slot ${slotNumberBigInt}`, { currentProposer, nextProposer, - proposer, + proposer: proposer.toString(), }); return PeerErrorSeverity.MidToleranceError; } diff --git a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts index 58f049b3d92e..9e2d9e394130 100644 --- a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts +++ b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts @@ -160,7 +160,7 @@ export class LibP2PService extends ); this.attestationValidator = new AttestationValidator(epochCache); - this.blockProposalValidator = new BlockProposalValidator(epochCache); + this.blockProposalValidator = new BlockProposalValidator(epochCache, { txsPermitted: !config.disableTransactions }); this.gossipSubEventHandler = this.handleGossipSubEvent.bind(this); @@ -745,12 +745,11 @@ export class LibP2PService extends return; } this.logger.debug( - `Received attestation for block ${attestation.blockNumber} slot ${attestation.slotNumber.toNumber()} from external peer ${source.toString()}`, + `Received attestation for slot ${attestation.slotNumber.toNumber()} from external peer ${source.toString()}`, { p2pMessageIdentifier: await attestation.p2pMessageIdentifier(), 
slot: attestation.slotNumber.toNumber(), archive: attestation.archive.toString(), - block: attestation.blockNumber, source: source.toString(), }, ); @@ -783,7 +782,6 @@ export class LibP2PService extends // REVIEW: callback pattern https://github.com/AztecProtocol/aztec-packages/issues/7963 @trackSpan('Libp2pService.processValidBlockProposal', async block => ({ - [Attributes.BLOCK_NUMBER]: block.blockNumber, [Attributes.SLOT_NUMBER]: block.slotNumber.toNumber(), [Attributes.BLOCK_ARCHIVE]: block.archive.toString(), [Attributes.P2P_ID]: await block.p2pMessageIdentifier().then(i => i.toString()), @@ -791,16 +789,12 @@ export class LibP2PService extends private async processValidBlockProposal(block: BlockProposal, sender: PeerId) { const slot = block.slotNumber.toBigInt(); const previousSlot = slot - 1n; - this.logger.verbose( - `Received block ${block.blockNumber} for slot ${slot} from external peer ${sender.toString()}.`, - { - p2pMessageIdentifier: await block.p2pMessageIdentifier(), - slot: block.slotNumber.toNumber(), - archive: block.archive.toString(), - block: block.blockNumber, - source: sender.toString(), - }, - ); + this.logger.verbose(`Received block proposal for slot ${slot} from external peer ${sender.toString()}.`, { + p2pMessageIdentifier: await block.p2pMessageIdentifier(), + slot: block.slotNumber.toNumber(), + archive: block.archive.toString(), + source: sender.toString(), + }); const attestationsForPreviousSlot = await this.mempools.attestationPool?.getAttestationsForSlot(previousSlot); if (attestationsForPreviousSlot !== undefined) { this.logger.verbose(`Received ${attestationsForPreviousSlot.length} attestations for slot ${previousSlot}`); @@ -815,15 +809,11 @@ export class LibP2PService extends // The attestation can be undefined if no handler is registered / the validator deems the block invalid if (attestations?.length) { for (const attestation of attestations) { - this.logger.verbose( - `Broadcasting attestation for block 
${attestation.blockNumber} slot ${attestation.slotNumber.toNumber()}`, - { - p2pMessageIdentifier: await attestation.p2pMessageIdentifier(), - slot: attestation.slotNumber.toNumber(), - archive: attestation.archive.toString(), - block: attestation.blockNumber, - }, - ); + this.logger.verbose(`Broadcasting attestation for slot ${attestation.slotNumber.toNumber()}`, { + p2pMessageIdentifier: await attestation.p2pMessageIdentifier(), + slot: attestation.slotNumber.toNumber(), + archive: attestation.archive.toString(), + }); await this.broadcastAttestation(attestation); } } @@ -834,7 +824,6 @@ export class LibP2PService extends * @param attestation - The attestation to broadcast. */ @trackSpan('Libp2pService.broadcastAttestation', async attestation => ({ - [Attributes.BLOCK_NUMBER]: attestation.blockNumber, [Attributes.SLOT_NUMBER]: attestation.payload.header.slotNumber.toNumber(), [Attributes.BLOCK_ARCHIVE]: attestation.archive.toString(), [Attributes.P2P_ID]: await attestation.p2pMessageIdentifier().then(i => i.toString()), @@ -1080,7 +1069,6 @@ export class LibP2PService extends * @returns True if the attestation is valid, false otherwise. 
*/ @trackSpan('Libp2pService.validateAttestation', async (_, attestation) => ({ - [Attributes.BLOCK_NUMBER]: attestation.blockNumber, [Attributes.SLOT_NUMBER]: attestation.payload.header.slotNumber.toNumber(), [Attributes.BLOCK_ARCHIVE]: attestation.archive.toString(), [Attributes.P2P_ID]: await attestation.p2pMessageIdentifier().then(i => i.toString()), diff --git a/yarn-project/p2p/src/services/peer-manager/peer_manager.ts b/yarn-project/p2p/src/services/peer-manager/peer_manager.ts index c1f3d1424157..3cdad5fbe84d 100644 --- a/yarn-project/p2p/src/services/peer-manager/peer_manager.ts +++ b/yarn-project/p2p/src/services/peer-manager/peer_manager.ts @@ -1,5 +1,5 @@ import type { EpochCacheInterface } from '@aztec/epoch-cache'; -import { makeEthSignDigest, recoverAddress } from '@aztec/foundation/crypto'; +import { makeEthSignDigest, tryRecoverAddress } from '@aztec/foundation/crypto'; import type { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { createLogger } from '@aztec/foundation/log'; @@ -907,7 +907,14 @@ export class PeerManager implements PeerManagerInterface { const hashToRecover = authRequest.getPayloadToSign(); const ethSignedHash = makeEthSignDigest(hashToRecover); - const sender = recoverAddress(ethSignedHash, peerAuthResponse.signature); + const sender = tryRecoverAddress(ethSignedHash, peerAuthResponse.signature); + if (!sender) { + this.logger.verbose(`Disconnecting peer ${peerId} due to failed auth handshake, invalid signature.`, logData); + this.markAuthHandshakeFailed(peerId); + this.markPeerForDisconnect(peerId); + return; + } + const registeredValidators = await this.epochCache.getRegisteredValidators(); const found = registeredValidators.find(v => v.toString() === sender.toString()) !== undefined; if (!found) { diff --git a/yarn-project/p2p/src/services/tx_collection/fast_tx_collection.ts b/yarn-project/p2p/src/services/tx_collection/fast_tx_collection.ts index 
75ecc34f0cba..70d175b7fd3d 100644 --- a/yarn-project/p2p/src/services/tx_collection/fast_tx_collection.ts +++ b/yarn-project/p2p/src/services/tx_collection/fast_tx_collection.ts @@ -55,7 +55,9 @@ export class FastTxCollection { } const blockInfo: L2BlockInfo = - input.type === 'proposal' ? input.blockProposal.toBlockInfo() : input.block.toBlockInfo(); + input.type === 'proposal' + ? { ...input.blockProposal.toBlockInfo(), blockNumber: input.blockNumber } + : { ...input.block.toBlockInfo() }; // This promise is used to await for the collection to finish during the main collectFast method. // It gets resolved in `foundTxs` when all txs have been collected, or rejected if the request is aborted or hits the deadline. diff --git a/yarn-project/p2p/src/services/tx_collection/tx_collection.ts b/yarn-project/p2p/src/services/tx_collection/tx_collection.ts index 72257a22e5a4..800f42601554 100644 --- a/yarn-project/p2p/src/services/tx_collection/tx_collection.ts +++ b/yarn-project/p2p/src/services/tx_collection/tx_collection.ts @@ -25,7 +25,7 @@ export type MissingTxInfo = { blockNumber: number; deadline: Date; readyForReqRe export type FastCollectionRequestInput = | { type: 'block'; block: L2Block } - | { type: 'proposal'; blockProposal: BlockProposal }; + | { type: 'proposal'; blockProposal: BlockProposal; blockNumber: number }; export type FastCollectionRequest = FastCollectionRequestInput & { missingTxHashes: Set; @@ -152,10 +152,11 @@ export class TxCollection { /** Collects the set of txs for the given block proposal as fast as possible */ public collectFastForProposal( blockProposal: BlockProposal, + blockNumber: number, txHashes: TxHash[] | string[], opts: { deadline: Date; pinnedPeer?: PeerId }, ) { - return this.collectFastFor({ type: 'proposal', blockProposal }, txHashes, opts); + return this.collectFastFor({ type: 'proposal', blockProposal, blockNumber }, txHashes, opts); } /** Collects the set of txs for the given mined block as fast as possible */ diff --git 
a/yarn-project/p2p/src/services/tx_provider.test.ts b/yarn-project/p2p/src/services/tx_provider.test.ts index 041b3fe2705f..84cd874a2f74 100644 --- a/yarn-project/p2p/src/services/tx_provider.test.ts +++ b/yarn-project/p2p/src/services/tx_provider.test.ts @@ -36,7 +36,7 @@ describe('TxProvider', () => { const buildProposal = (txs: Tx[], txHashes: TxHash[]) => { const payload = new ConsensusPayload(ProposedBlockHeader.empty(), Fr.random(), StateReference.empty()); - return new BlockProposal(1, payload, Signature.empty(), txHashes, txs); + return new BlockProposal(payload, Signature.empty(), txHashes, txs); }; const setupTxPools = (txsInPool: number, txsOnP2P: number, txs: Tx[]) => { @@ -74,6 +74,8 @@ describe('TxProvider', () => { .map(({ value }) => value); }; + const blockNumber = 1; + beforeEach(() => { txPools.clear(); additionalP2PTxs.length = 0; @@ -116,7 +118,7 @@ describe('TxProvider', () => { const txs = shuffleTxs(original); const hashes = await Promise.all(txs.map(tx => tx.getTxHash())); const proposal = buildProposal([], hashes); - const results = await txProvider.getTxsForBlockProposal(proposal, opts); + const results = await txProvider.getTxsForBlockProposal(proposal, blockNumber, opts); const expected: TxResults = { txs, missingTxs: [] }; await checkResults(results, expected); expect(txPools.size).toEqual(10); @@ -131,7 +133,7 @@ describe('TxProvider', () => { const hashes = await Promise.all(txs.map(tx => tx.getTxHash())); const proposal = buildProposal([], hashes); - const results = await txProvider.getTxsForBlockProposal(proposal, opts); + const results = await txProvider.getTxsForBlockProposal(proposal, blockNumber, opts); const expected: TxResults = { txs: txs.slice(0, 5), missingTxs: originalHashes.slice(5) }; await checkResults(results, expected); expect(txPools.size).toEqual(5); @@ -145,7 +147,7 @@ describe('TxProvider', () => { const txs = original; const hashes = await Promise.all(txs.map(tx => tx.getTxHash())); const proposal = 
buildProposal([], hashes); - const results = await txProvider.getTxsForBlockProposal(proposal, opts); + const results = await txProvider.getTxsForBlockProposal(proposal, blockNumber, opts); const expected: TxResults = { txs: txs.slice(0, 6), missingTxs: originalHashes.slice(6) }; await checkResults(results, expected); expect(txPools.size).toEqual(6); @@ -159,7 +161,7 @@ describe('TxProvider', () => { const txs = shuffleTxs([...original]); const hashes = await Promise.all(txs.map(tx => tx.getTxHash())); const proposal = buildProposal(original.slice(6), hashes); - const results = await txProvider.getTxsForBlockProposal(proposal, opts); + const results = await txProvider.getTxsForBlockProposal(proposal, blockNumber, opts); const expected: TxResults = { txs, missingTxs: [] }; await checkResults(results, expected); // all txs should be in the pool @@ -186,7 +188,7 @@ describe('TxProvider', () => { ).map(method => jest.spyOn(txProvider.instrumentation, method)); // Check result is correct - const results = await txProvider.getTxsForBlockProposal(proposal, opts); + const results = await txProvider.getTxsForBlockProposal(proposal, blockNumber, opts); const expected: TxResults = { txs: txs.slice(0, 8), missingTxs: txs.slice(8).map(t => t.txHash) }; await checkResults(results, expected); expect(txPools.size).toEqual(8); @@ -212,7 +214,7 @@ describe('TxProvider', () => { const txs = original; const hashes = await Promise.all(txs.map(tx => tx.getTxHash())); const proposal = buildProposal(txs.slice(4, 8), hashes); - const results = await txProvider.getTxsForBlockProposal(proposal, opts); + const results = await txProvider.getTxsForBlockProposal(proposal, blockNumber, opts); const expected: TxResults = { txs: txs.slice(0, 8), missingTxs: originalHashes.slice(8) }; await checkResults(results, expected); // all txs should be in the pool @@ -231,7 +233,7 @@ describe('TxProvider', () => { // Add additional txs and these should not be added to the pool and not in the results const 
proposal = buildProposal(txs.slice(4, 8).concat(additional), hashes); - const results = await txProvider.getTxsForBlockProposal(proposal, opts); + const results = await txProvider.getTxsForBlockProposal(proposal, blockNumber, opts); const expected: TxResults = { txs: txs.slice(0, 8), missingTxs: originalHashes.slice(8) }; await checkResults(results, expected); // all txs should be in the pool diff --git a/yarn-project/p2p/src/services/tx_provider.ts b/yarn-project/p2p/src/services/tx_provider.ts index 0e2459ab4699..ef640405d049 100644 --- a/yarn-project/p2p/src/services/tx_provider.ts +++ b/yarn-project/p2p/src/services/tx_provider.ts @@ -55,11 +55,12 @@ export class TxProvider implements ITxProvider { /** Gathers txs from the tx pool, proposal body, remote rpc nodes, and reqresp. */ public getTxsForBlockProposal( blockProposal: BlockProposal, + blockNumber: number, opts: { pinnedPeer: PeerId | undefined; deadline: Date }, ): Promise<{ txs: Tx[]; missingTxs: TxHash[] }> { return this.getOrderedTxsFromAllSources( - { type: 'proposal', blockProposal }, - blockProposal.toBlockInfo(), + { type: 'proposal', blockProposal, blockNumber }, + { ...blockProposal.toBlockInfo(), blockNumber }, blockProposal.txHashes, { ...opts, pinnedPeer: opts.pinnedPeer }, ); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 6ee32c9ac52b..f8710479eb19 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -93,12 +93,7 @@ describe('sequencer', () => { const getAttestations = () => { const consensusPayload = ConsensusPayload.fromBlock(block); - const attestation = new BlockAttestation( - block.header.globalVariables.blockNumber, - consensusPayload, - mockedSig, - mockedSig, - ); + const attestation = new BlockAttestation(consensusPayload, mockedSig, mockedSig); (attestation as any).sender = committee[0]; return 
[attestation]; }; @@ -106,7 +101,7 @@ describe('sequencer', () => { const createBlockProposal = () => { const consensusPayload = ConsensusPayload.fromBlock(block); const txHashes = block.body.txEffects.map(tx => tx.txHash); - return new BlockProposal(block.header.globalVariables.blockNumber, consensusPayload, mockedSig, txHashes); + return new BlockProposal(consensusPayload, mockedSig, txHashes); }; const processTxs = async (txs: Tx[]) => { @@ -310,7 +305,7 @@ describe('sequencer', () => { block = await makeBlock([tx]); mockPendingTxs([tx]); - await sequencer.doRealWork(); + await sequencer.work(); expectPublisherProposeL2Block(); }); @@ -324,7 +319,7 @@ describe('sequencer', () => { expect(sequencer.getTimeTable().initializeDeadline).toEqual(1); const l1TsForL2Slot1 = Number(l1Constants.l1GenesisTime) + slotDuration; dateProvider.setTime((l1TsForL2Slot1 + 2) * 1000); - await expect(sequencer.doRealWork()).rejects.toThrow( + await expect(sequencer.work()).rejects.toThrow( expect.objectContaining({ name: 'SequencerTooSlowError', message: expect.stringContaining(`Too far into slot`), @@ -353,7 +348,7 @@ describe('sequencer', () => { // we begin immediately after the last L1 block for the previous slot has been mined dateProvider.setTime((l1TsForL2Slot1 - ethereumSlotDuration + 0.1) * 1000); - await sequencer.doRealWork(); + await sequencer.work(); expect(blockBuilder.buildBlock).toHaveBeenCalled(); expect(validatorClient.collectAttestations).toHaveBeenCalled(); @@ -370,7 +365,7 @@ describe('sequencer', () => { publisher.canProposeAtNextEthBlock.mockReturnValue(Promise.resolve(undefined)); publisher.validateBlockHeader.mockRejectedValue(new Error()); - await sequencer.doRealWork(); + await sequencer.work(); expect(blockBuilder.buildBlock).not.toHaveBeenCalled(); // Now we can propose, but lets assume that the content is still "bad" (missing sigs etc) @@ -380,14 +375,14 @@ describe('sequencer', () => { timeOfNextL1Slot: 1000n, }); - await sequencer.doRealWork(); + 
await sequencer.work(); expect(blockBuilder.buildBlock).not.toHaveBeenCalled(); // Now it is! publisher.validateBlockHeader.mockClear(); publisher.validateBlockHeader.mockResolvedValue(); - await sequencer.doRealWork(); + await sequencer.work(); expect(blockBuilder.buildBlock).toHaveBeenCalledWith( expect.anything(), expect.anything(), @@ -403,13 +398,13 @@ describe('sequencer', () => { // block is not built with 0 txs mockPendingTxs([]); - await sequencer.doRealWork(); + await sequencer.work(); expect(blockBuilder.buildBlock).toHaveBeenCalledTimes(0); // block is not built with 3 txs mockPendingTxs(txs.slice(0, 3)); - await sequencer.doRealWork(); + await sequencer.work(); expect(blockBuilder.buildBlock).toHaveBeenCalledTimes(0); // block is built with 4 txs @@ -417,7 +412,7 @@ describe('sequencer', () => { mockPendingTxs(neededTxs); block = await makeBlock(neededTxs); - await sequencer.doRealWork(); + await sequencer.work(); expect(blockBuilder.buildBlock).toHaveBeenCalledWith( expect.anything(), @@ -473,7 +468,7 @@ describe('sequencer', () => { ); publisher.canProposeAtNextEthBlock.mockResolvedValueOnce(undefined); - await sequencer.doRealWork(); + await sequencer.work(); expect(publisher.enqueueProposeL2Block).not.toHaveBeenCalled(); }); @@ -483,11 +478,11 @@ describe('sequencer', () => { block = await makeBlock([tx]); l2BlockSource.getL1Timestamp.mockResolvedValue(1000n - BigInt(ethereumSlotDuration) - 1n); - await sequencer.doRealWork(); + await sequencer.work(); expect(publisher.enqueueProposeL2Block).not.toHaveBeenCalled(); l2BlockSource.getL1Timestamp.mockResolvedValue(1000n - BigInt(ethereumSlotDuration)); - await sequencer.doRealWork(); + await sequencer.work(); expect(publisher.enqueueProposeL2Block).toHaveBeenCalled(); }); @@ -499,7 +494,7 @@ describe('sequencer', () => { // This could practically be for any reason, e.g., could also be that we have entered a new slot. 
publisher.validateBlockHeader.mockResolvedValueOnce().mockRejectedValueOnce(new Error('No block for you')); - await sequencer.doRealWork(); + await sequencer.work(); expect(publisher.enqueueProposeL2Block).not.toHaveBeenCalled(); }); @@ -511,7 +506,7 @@ describe('sequencer', () => { validatorClient.createBlockProposal.mockResolvedValue(undefined); - await sequencer.doRealWork(); + await sequencer.work(); expect(publisher.enqueueProposeL2Block).not.toHaveBeenCalled(); }); @@ -523,7 +518,7 @@ describe('sequencer', () => { publisher.enqueueProposeL2Block.mockRejectedValueOnce(new Error('Failed to enqueue propose L2 block')); - await sequencer.doRealWork(); + await sequencer.work(); expectPublisherProposeL2Block(); // Even though the block publish was not enqueued, we still send any requests @@ -532,7 +527,7 @@ describe('sequencer', () => { it('should proceed with block proposal when there is no proposer yet', async () => { // Mock that there is no official proposer yet - epochCache.getProposerAttesterAddressInNextSlot.mockResolvedValueOnce(undefined); + epochCache.getProposerAttesterAddressInSlot.mockResolvedValueOnce(undefined); epochCache.getCommittee.mockResolvedValueOnce({ committee: [] as EthAddress[] } as EpochCommitteeInfo); // Mock that we have some pending transactions @@ -540,7 +535,7 @@ describe('sequencer', () => { mockPendingTxs(txs); block = await makeBlock(txs); - await sequencer.doRealWork(); + await sequencer.work(); // Verify that the sequencer attempted to create and broadcast a block proposal expect(publisher.enqueueProposeL2Block).toHaveBeenCalled(); @@ -610,7 +605,7 @@ describe('sequencer', () => { mockPendingTxs([tx]); block = await makeBlock([tx]); - await sequencer.doRealWork(); + await sequencer.work(); expect(blockBuilder.buildBlock).toHaveBeenCalledWith( expect.anything(), expect.anything(), @@ -631,6 +626,132 @@ describe('sequencer', () => { } }); }); + + describe('voting when sync fails', () => { + beforeEach(() => { + // Mock that sync 
fails + const differentHash = Fr.random().toString(); + worldState.status.mockResolvedValue({ + state: WorldStateRunningState.IDLE, + syncSummary: { + latestBlockNumber: lastBlockNumber, + latestBlockHash: differentHash, // Different hash causes sync check to fail + } as WorldStateSyncStatus, + }); + }); + + const mockSlashActions = [{ type: 'vote-offenses' as const, round: 1n, votes: [], committees: [] }]; + + it('should vote on slashing and governance when sync fails and past initialize deadline', async () => { + // Set time to be past the initializeDeadline (which is 1s based on test config) + // Build start is: l1GenesisTime + slotNumber * slotDuration - ethereumSlotDuration + // For slot 1: l1GenesisTime + 1 * 8 - 4 = l1GenesisTime + 4 + expect(sequencer.getTimeTable().initializeDeadline).toEqual(1); + const buildStartTime = Number(l1Constants.l1GenesisTime) + slotDuration - ethereumSlotDuration; + dateProvider.setTime((buildStartTime + 2) * 1000); // 2 seconds after build start, past the 1s deadline + + // Mock slashing actions + slasherClient.getProposerActions.mockResolvedValue(mockSlashActions); + + // Set us as the proposer + validatorClient.getValidatorAddresses.mockReturnValue([signer.address]); + epochCache.getProposerAttesterAddressInSlot.mockResolvedValue(signer.address); + + // Mock governance payload + const governancePayload = EthAddress.random(); + sequencer.updateConfig({ governanceProposerPayload: governancePayload }); + + // Mock publisher methods to return true + publisher.enqueueSlashingActions.mockResolvedValue(true); + publisher.enqueueGovernanceCastSignal.mockResolvedValue(true); + + await sequencer.work(); + + // We're testing the new behavior - that we try to vote even when sync fails + // when we're past the time we could build a block + expect(slasherClient.getProposerActions).toHaveBeenCalledWith(1n); + expect(publisher.enqueueSlashingActions).toHaveBeenCalled(); + expect(publisher.enqueueGovernanceCastSignal).toHaveBeenCalledWith( + 
governancePayload, + 1n, + 1000n, + expect.any(EthAddress), + expect.any(Function), + ); + expect(publisher.sendRequests).toHaveBeenCalled(); + }); + + it('should not vote when sync fails and within time limit', async () => { + // Set time to be within the max allowed time + // Build start is: l1GenesisTime + slotNumber * slotDuration - ethereumSlotDuration + // For slot 1: l1GenesisTime + 1 * 8 - 4 = l1GenesisTime + 4 + // initializeDeadline is 1s, so we need to be less than 1s after the build start + const buildStartTime = Number(l1Constants.l1GenesisTime) + slotDuration - ethereumSlotDuration; + dateProvider.setTime((buildStartTime + 0.5) * 1000); // 0.5s after build start, within 1s deadline + + // Mock slashing actions + slasherClient.getProposerActions.mockResolvedValue(mockSlashActions); + + // Set us as the proposer + validatorClient.getValidatorAddresses.mockReturnValue([signer.address]); + epochCache.getProposerAttesterAddressInSlot.mockResolvedValue(signer.address); + + await sequencer.work(); + + // Should not attempt to enqueue slashing actions when within time limit + expect(publisher.enqueueSlashingActions).not.toHaveBeenCalled(); + }); + + it('should not vote when sync fails but not a proposer', async () => { + // Set time to be past the max allowed time + const buildStartTime = Number(l1Constants.l1GenesisTime) + slotDuration - ethereumSlotDuration; + dateProvider.setTime((buildStartTime + 2) * 1000); // 2s after build start, past 1s deadline + + // Mock slashing actions + slasherClient.getProposerActions.mockResolvedValue(mockSlashActions); + + // Set us as NOT the proposer + validatorClient.getValidatorAddresses.mockReturnValue([EthAddress.random()]); + epochCache.getProposerAttesterAddressInSlot.mockResolvedValue(signer.address); // Different address + + await sequencer.work(); + + // Should not vote when not a proposer + expect(publisher.enqueueSlashingActions).not.toHaveBeenCalled(); + }); + + it('should not attempt to vote twice in the same 
slot', async () => { + // Set time to be past the max allowed time + const buildStartTime = Number(l1Constants.l1GenesisTime) + slotDuration - ethereumSlotDuration; + dateProvider.setTime((buildStartTime + 2) * 1000); // 2s after build start, past 1s deadline + + // Mock slashing actions + slasherClient.getProposerActions.mockResolvedValue(mockSlashActions); + + // Set us as the proposer + validatorClient.getValidatorAddresses.mockReturnValue([signer.address]); + epochCache.getProposerAttesterAddressInSlot.mockResolvedValue(signer.address); + + // Mock publisher methods + publisher.enqueueSlashingActions.mockResolvedValue(true); + + // First attempt should succeed + await sequencer.work(); + expect(publisher.enqueueSlashingActions).toHaveBeenCalledTimes(1); + expect(publisher.sendRequests).toHaveBeenCalledTimes(1); + + // Reset mocks + publisher.enqueueSlashingActions.mockClear(); + publisher.sendRequests.mockClear(); + slasherClient.getProposerActions.mockClear(); + + // Second attempt in the same slot should be skipped + await sequencer.work(); + expect(slasherClient.getProposerActions).not.toHaveBeenCalled(); + expect(publisher.enqueueSlashingActions).not.toHaveBeenCalled(); + expect(publisher.sendRequests).not.toHaveBeenCalled(); + }); + }); }); class TestSubject extends Sequencer { @@ -642,9 +763,9 @@ class TestSubject extends Sequencer { this.l1Constants.l1GenesisTime = BigInt(l1GenesisTime); } - public override doRealWork() { + public override work() { this.setState(SequencerState.IDLE, undefined, { force: true }); - return super.doRealWork(); + return super.work(); } public override getBlockBuilderOptions(slot: number): PublicProcessorLimits { diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index fa8fbebd06a2..3e7d4836ee62 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -96,6 +96,9 @@ export 
class Sequencer extends (EventEmitter as new () => TypedEventEmitter TypedEventEmitter TypedEventEmitter= slot) { + this.log.debug( + `Cannot propose block at next L2 slot ${slot} since that slot was taken by our own block ${this.lastBlockPublished.number}`, + ); + return; + } - // Do not go forward with new block if the previous one has not been mined and processed + // Check all components are synced to latest as seen by the archiver (queries all subsystems) + const syncedTo = await this.checkSync({ ts, slot }); if (!syncedTo) { + await this.tryVoteWhenSyncFails({ slot, ts }); return; } const chainTipArchive = syncedTo.archive; const newBlockNumber = syncedTo.blockNumber + 1; - const { slot, ts, now } = this.epochCache.getEpochAndSlotInNextL1Slot(); - - this.setState(SequencerState.PROPOSER_CHECK, slot); - - // Check that the archiver and dependencies have synced to the previous L1 slot at least - // TODO(#14766): Archiver reports L1 timestamp based on L1 blocks seen, which means that a missed L1 block will - // cause the archiver L1 timestamp to fall behind, and cause this sequencer to start processing one L1 slot later. 
const syncLogData = { now, syncedToL1Ts: syncedTo.l1Timestamp, @@ -278,74 +282,30 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter= slot) { - this.log.debug( + this.log.warn( `Cannot propose block at next L2 slot ${slot} since that slot was taken by block ${syncedTo.blockNumber}`, { ...syncLogData, block: syncedTo.block.header.toInspect() }, ); return; } - // Or that we haven't published it ourselves - if (this.lastBlockPublished && this.lastBlockPublished.header.getSlot() >= slot) { - this.log.debug( - `Cannot propose block at next L2 slot ${slot} since that slot was taken by our own block ${this.lastBlockPublished.number}`, - { ...syncLogData, block: this.lastBlockPublished.header.toInspect() }, - ); - return; - } - // Check that we are a proposer for the next slot - let proposerInNextSlot: EthAddress | undefined; - try { - proposerInNextSlot = await this.epochCache.getProposerAttesterAddressInNextSlot(); - } catch (e) { - if (e instanceof NoCommitteeError) { - this.log.warn( - `Cannot propose block ${newBlockNumber} at next L2 slot ${slot} since the committee does not exist on L1`, - ); - return; - } - } + this.setState(SequencerState.PROPOSER_CHECK, slot); + const [canPropose, proposer] = await this.checkCanPropose(slot); - // If get proposer in next slot is undefined, then the committee is empty and anyone may propose. - // If the committee is defined and not empty, but none of our validators are the proposer, then stop. - const validatorAddresses = this.validatorClient!.getValidatorAddresses(); - if (proposerInNextSlot !== undefined && !validatorAddresses.some(addr => addr.equals(proposerInNextSlot))) { - this.log.debug(`Cannot propose block ${newBlockNumber} since we are not a proposer`, { - us: validatorAddresses, - proposer: proposerInNextSlot, - ...syncLogData, - }); - // If the pending chain is invalid, we may need to invalidate the block if no one else is doing it. 
- if (!syncedTo.pendingChainValidationStatus.valid) { - // We pass i undefined here to get any available publisher. - const { publisher } = await this.publisherFactory.create(undefined); - await this.considerInvalidatingBlock(syncedTo, slot, validatorAddresses, publisher); - } + // If we are not a proposer, check if we should invalidate an invalid block, and bail + if (!canPropose) { + await this.considerInvalidatingBlock(syncedTo, slot); + return; + } - // Check with the rollup if we can indeed propose at the next L2 slot. This check should not fail - // if all the previous checks are good, but we do it just in case. - const proposerAddressInNextSlot = proposerInNextSlot ?? EthAddress.ZERO; - // We now need to get ourselves a publisher. // The returned attestor will be the one we provided if we provided one. // Otherwise it will be a valid attestor for the returned publisher. - const { attestorAddress, publisher } = await this.publisherFactory.create(proposerInNextSlot); - + const { attestorAddress, publisher } = await this.publisherFactory.create(proposer); this.log.verbose(`Created publisher at address ${publisher.getSenderAddress()} for attestor ${attestorAddress}`); - this.publisher = publisher; const coinbase = this.validatorClient!.getCoinbaseForAttestor(attestorAddress); @@ -353,9 +313,12 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter TypedEventEmitter TypedEventEmitter - this.validatorClient!.signWithAddress(attestorAddress, msg).then(s => s.toString()); - - const enqueueGovernanceSignalPromise = - this.governanceProposerPayload && !this.governanceProposerPayload.isZero() - ? publisher - .enqueueGovernanceCastSignal(this.governanceProposerPayload, slot, timestamp, attestorAddress, signerFn) - .catch(err => { - this.log.error(`Error enqueuing governance vote`, err, { blockNumber: newBlockNumber, slot }); - return false; - }) - : Promise.resolve(false); - - const enqueueSlashingActionsPromise = this.slasherClient - ? 
this.slasherClient - .getProposerActions(slot) - .then(actions => publisher.enqueueSlashingActions(actions, slot, timestamp, attestorAddress, signerFn)) - .catch(err => { - this.log.error(`Error enqueuing slashing actions`, err, { blockNumber: newBlockNumber, slot }); - return false; - }) - : Promise.resolve(false); + // Enqueue governance and slashing votes (returns promises that will be awaited later) + const votesPromises = this.enqueueGovernanceAndSlashingVotes( + publisher, + attestorAddress, + slot, + newGlobalVariables.timestamp, + ); + // Enqueues block invalidation if (invalidateBlock && !this.config.skipInvalidateBlockAsProposer) { publisher.enqueueInvalidateBlock(invalidateBlock); } + // Actual block building this.setState(SequencerState.INITIALIZING_PROPOSAL, slot); + const block: L2Block | undefined = await this.tryBuildBlockAndEnqueuePublish( + slot, + proposer, + newBlockNumber, + publisher, + newGlobalVariables, + chainTipArchive, + invalidateBlock, + ); + + // Wait until the voting promises have resolved, so all requests are enqueued + await Promise.all(votesPromises); + + // And send the tx to L1 + const l1Response = await publisher.sendRequests(); + const proposedBlock = l1Response?.successfulActions.find(a => a === 'propose'); + if (proposedBlock) { + this.lastBlockPublished = block; + this.emit('block-published', { blockNumber: newBlockNumber, slot: Number(slot) }); + await this.metrics.incFilledSlot(publisher.getSenderAddress().toString(), coinbase); + } else if (block) { + this.emit('block-publish-failed', l1Response ?? {}); + } - this.metrics.incOpenSlot(slot, proposerAddressInNextSlot.toString()); + this.setState(SequencerState.IDLE, undefined); + } + + /** Tries building a block proposal, and if successful, enqueues it for publishing. 
*/ + private async tryBuildBlockAndEnqueuePublish( + slot: bigint, + proposer: EthAddress | undefined, + newBlockNumber: number, + publisher: SequencerPublisher, + newGlobalVariables: GlobalVariables, + chainTipArchive: Fr, + invalidateBlock: InvalidateBlockRequest | undefined, + ) { + this.metrics.incOpenSlot(slot, (proposer ?? EthAddress.ZERO).toString()); this.log.verbose(`Preparing proposal for block ${newBlockNumber} at slot ${slot}`, { - proposer: proposerInNextSlot?.toString(), - coinbase, + proposer, publisher: publisher.getSenderAddress(), - feeRecipient, globalVariables: newGlobalVariables.toInspect(), chainTipArchive, blockNumber: newBlockNumber, slot, }); - // If I created a "partial" header here that should make our job much easier. const proposalHeader = ProposedBlockHeader.from({ ...newGlobalVariables, timestamp: newGlobalVariables.timestamp, @@ -457,7 +436,7 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter TypedEventEmitter a === 'propose'); - if (proposedBlock) { - this.lastBlockPublished = block; - this.emit('block-published', { blockNumber: newBlockNumber, slot: Number(slot) }); - await this.metrics.incFilledSlot(publisher.getSenderAddress().toString(), coinbase); - } else if (block) { - this.emit('block-publish-failed', l1Response ?? 
{}); - } - - this.setState(SequencerState.IDLE, undefined); + return block; } @trackSpan('Sequencer.work') - protected async work() { + protected async safeWork() { try { - await this.doRealWork(); + await this.work(); } catch (err) { if (err instanceof SequencerTooSlowError) { // Log as warn only if we had to abort halfway through the block proposal @@ -825,9 +791,8 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter TypedEventEmitter= INITIAL_L2_BLOCK_NUM) { - const block = await this.l2BlockSource.getBlock(blockNumber); - if (!block) { - // this shouldn't really happen because a moment ago we checked that all components were in sync - this.log.warn(`Failed to get L2 block ${blockNumber} from the archiver with all components in sync`, logData); - return undefined; - } - - return { - block, - blockNumber: block.number, - archive: block.archive.root, + // Check that the archiver and dependencies have synced to the previous L1 slot at least + // TODO(#14766): Archiver reports L1 timestamp based on L1 blocks seen, which means that a missed L1 block will + // cause the archiver L1 timestamp to fall behind, and cause this sequencer to start processing one L1 slot later. 
+ const { slot, ts } = args; + if (l1Timestamp + BigInt(this.l1Constants.ethereumSlotDuration) < ts) { + this.log.debug(`Cannot propose block at next L2 slot ${slot} due to pending sync from L1`, { + slot, + ts, l1Timestamp, - pendingChainValidationStatus, - }; - } else { + }); + return undefined; + } + + // Special case for genesis state + const blockNumber = worldState.number; + if (blockNumber < INITIAL_L2_BLOCK_NUM) { const archive = new Fr((await this.worldState.getCommitted().getTreeInfo(MerkleTreeId.ARCHIVE)).root); return { blockNumber: INITIAL_L2_BLOCK_NUM - 1, archive, l1Timestamp, pendingChainValidationStatus }; } + + const block = await this.l2BlockSource.getBlock(blockNumber); + if (!block) { + // this shouldn't really happen because a moment ago we checked that all components were in sync + this.log.error(`Failed to get L2 block ${blockNumber} from the archiver with all components in sync`); + return undefined; + } + + return { + block, + blockNumber: block.number, + archive: block.archive.root, + l1Timestamp, + pendingChainValidationStatus, + }; + } + + /** + * Enqueues governance and slashing votes with the publisher. Does not block. + * @param publisher - The publisher to enqueue votes with + * @param attestorAddress - The attestor address to use for signing + * @param slot - The slot number + * @param timestamp - The timestamp for the votes + * @param context - Optional context for logging (e.g., block number) + * @returns A tuple of [governanceEnqueued, slashingEnqueued] + */ + protected enqueueGovernanceAndSlashingVotes( + publisher: SequencerPublisher, + attestorAddress: EthAddress, + slot: bigint, + timestamp: bigint, + ): [Promise | undefined, Promise | undefined] { + try { + const signerFn = (msg: TypedDataDefinition) => + this.validatorClient!.signWithAddress(attestorAddress, msg).then(s => s.toString()); + + const enqueueGovernancePromise = + this.governanceProposerPayload && !this.governanceProposerPayload.isZero() + ? 
publisher + .enqueueGovernanceCastSignal(this.governanceProposerPayload, slot, timestamp, attestorAddress, signerFn) + .catch(err => { + this.log.error(`Error enqueuing governance vote`, err, { slot }); + return false; + }) + : undefined; + + const enqueueSlashingPromise = this.slasherClient + ? this.slasherClient + .getProposerActions(slot) + .then(actions => publisher.enqueueSlashingActions(actions, slot, timestamp, attestorAddress, signerFn)) + .catch(err => { + this.log.error(`Error enqueuing slashing actions`, err, { slot }); + return false; + }) + : undefined; + + return [enqueueGovernancePromise, enqueueSlashingPromise]; + } catch (err) { + this.log.error(`Error enqueueing governance and slashing votes`, err); + return [undefined, undefined]; + } + } + + /** + * Checks if we are the proposer for the next slot. + * @returns True if we can propose, and the proposer address (undefined if anyone can propose) + */ + protected async checkCanPropose(slot: bigint): Promise<[boolean, EthAddress | undefined]> { + let proposer: EthAddress | undefined; + try { + proposer = await this.epochCache.getProposerAttesterAddressInSlot(slot); + } catch (e) { + if (e instanceof NoCommitteeError) { + this.log.warn(`Cannot propose at next L2 slot ${slot} since the committee does not exist on L1`); + return [false, undefined]; + } + this.log.error(`Error getting proposer for slot ${slot}`, e); + return [false, undefined]; + } + + // If proposer is undefined, then the committee is empty and anyone may propose + if (proposer === undefined) { + return [true, undefined]; + } + + const validatorAddresses = this.validatorClient!.getValidatorAddresses(); + const weAreProposer = validatorAddresses.some(addr => addr.equals(proposer)); + + if (!weAreProposer) { + this.log.debug(`Cannot propose at slot ${slot} since we are not a proposer`, { validatorAddresses, proposer }); + return [false, proposer]; + } + + return [true, proposer]; + } + + /** + * Tries to vote on slashing actions and 
governance when the sync check fails but we're past the max time for initializing a proposal. + * This allows the sequencer to participate in governance/slashing votes even when it cannot build blocks. + */ + protected async tryVoteWhenSyncFails(args: { slot: bigint; ts: bigint }): Promise { + const { slot, ts } = args; + + // Prevent duplicate attempts in the same slot + if (this.lastSlotForVoteWhenSyncFailed === slot) { + this.log.debug(`Already attempted to vote in slot ${slot} (skipping)`); + return; + } + + // Check if we're past the max time for initializing a proposal + const secondsIntoSlot = this.getSecondsIntoSlot(slot); + const maxAllowedTime = this.timetable.getMaxAllowedTime(SequencerState.INITIALIZING_PROPOSAL); + + // If we haven't exceeded the time limit for initializing a proposal, don't proceed with voting + // We use INITIALIZING_PROPOSAL time limit because if we're past that, we can't build a block anyway + if (maxAllowedTime === undefined || secondsIntoSlot <= maxAllowedTime) { + this.log.trace(`Not attempting to vote since there is still time for block building`, { + secondsIntoSlot, + maxAllowedTime, + }); + return; + } + + this.log.debug(`Sync for slot ${slot} failed, checking for voting opportunities`, { + secondsIntoSlot, + maxAllowedTime, + }); + + // Check if we're a proposer or proposal is open + const [canPropose, proposer] = await this.checkCanPropose(slot); + if (!canPropose) { + this.log.debug(`Cannot vote in slot ${slot} since we are not a proposer`, { slot, proposer }); + return; + } + + // Mark this slot as attempted + this.lastSlotForVoteWhenSyncFailed = slot; + + // Get a publisher for voting + const { attestorAddress, publisher } = await this.publisherFactory.create(proposer); + + this.log.debug(`Attempting to vote despite sync failure at slot ${slot}`, { + attestorAddress, + slot, + }); + + // Enqueue governance and slashing votes using the shared helper method + const votesPromises = 
this.enqueueGovernanceAndSlashingVotes(publisher, attestorAddress, slot, ts); + await Promise.all(votesPromises); + + if (votesPromises.every(p => !p)) { + this.log.debug(`No votes to enqueue for slot ${slot}`); + return; + } + + this.log.info(`Voting in slot ${slot} despite sync failure`, { slot }); + await publisher.sendRequests(); } /** @@ -897,19 +1016,19 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter>>, + syncedTo: NonNullable>>, currentSlot: bigint, - ourValidatorAddresses: EthAddress[], - publisher: SequencerPublisher, ): Promise { const { pendingChainValidationStatus, l1Timestamp } = syncedTo; if (pendingChainValidationStatus.valid) { return; } + const { publisher } = await this.publisherFactory.create(undefined); const invalidBlockNumber = pendingChainValidationStatus.block.blockNumber; const invalidBlockTimestamp = pendingChainValidationStatus.block.timestamp; const timeSinceChainInvalid = this.dateProvider.nowInSeconds() - Number(invalidBlockTimestamp); + const ourValidatorAddresses = this.validatorClient!.getValidatorAddresses(); const { secondsBeforeInvalidatingBlockAsCommitteeMember, secondsBeforeInvalidatingBlockAsNonCommitteeMember } = this.config; diff --git a/yarn-project/sequencer-client/src/sequencer/timetable.ts b/yarn-project/sequencer-client/src/sequencer/timetable.ts index 53f3b169f2ee..5c724b00ea2e 100644 --- a/yarn-project/sequencer-client/src/sequencer/timetable.ts +++ b/yarn-project/sequencer-client/src/sequencer/timetable.ts @@ -137,6 +137,19 @@ export class SequencerTimetable { return validationTimeEnd; } + public getMaxAllowedTime( + state: Extract< + SequencerState, + SequencerState.STOPPED | SequencerState.STOPPING | SequencerState.IDLE | SequencerState.SYNCHRONIZING + >, + ): undefined; + public getMaxAllowedTime( + state: Exclude< + SequencerState, + SequencerState.STOPPED | SequencerState.STOPPING | SequencerState.IDLE | SequencerState.SYNCHRONIZING + >, + ): number; + public 
getMaxAllowedTime(state: SequencerState): number | undefined; public getMaxAllowedTime(state: SequencerState): number | undefined { switch (state) { case SequencerState.STOPPED: diff --git a/yarn-project/stdlib/src/block/l2_block_source.ts b/yarn-project/stdlib/src/block/l2_block_source.ts index 2946abd3836b..a66466bd174f 100644 --- a/yarn-project/stdlib/src/block/l2_block_source.ts +++ b/yarn-project/stdlib/src/block/l2_block_source.ts @@ -1,4 +1,5 @@ import type { EthAddress } from '@aztec/foundation/eth-address'; +import type { Fr } from '@aztec/foundation/fields'; import type { TypedEventEmitter } from '@aztec/foundation/types'; import { z } from 'zod'; @@ -66,6 +67,34 @@ export interface L2BlockSource { /** Equivalent to getBlocks but includes publish data. */ getPublishedBlocks(from: number, limit: number, proven?: boolean): Promise; + /** + * Gets a published block by its hash. + * @param blockHash - The block hash to retrieve. + * @returns The requested published block (or undefined if not found). + */ + getPublishedBlockByHash(blockHash: Fr): Promise; + + /** + * Gets a published block by its archive root. + * @param archive - The archive root to retrieve. + * @returns The requested published block (or undefined if not found). + */ + getPublishedBlockByArchive(archive: Fr): Promise; + + /** + * Gets a block header by its hash. + * @param blockHash - The block hash to retrieve. + * @returns The requested block header (or undefined if not found). + */ + getBlockHeaderByHash(blockHash: Fr): Promise; + + /** + * Gets a block header by its archive root. + * @param archive - The archive root to retrieve. + * @returns The requested block header (or undefined if not found). + */ + getBlockHeaderByArchive(archive: Fr): Promise; + /** * Gets a tx effect. * @param txHash - The hash of the tx corresponding to the tx effect. 
@@ -120,6 +149,9 @@ export interface L2BlockSource { */ getL1Constants(): Promise; + /** Returns values for the genesis block */ + getGenesisValues(): Promise<{ genesisArchiveRoot: Fr }>; + /** Latest synced L1 timestamp. */ getL1Timestamp(): Promise; diff --git a/yarn-project/stdlib/src/interfaces/archiver.test.ts b/yarn-project/stdlib/src/interfaces/archiver.test.ts index d06a339164c0..9fecaeb83fbe 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.test.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.test.ts @@ -91,6 +91,16 @@ describe('ArchiverApiSchema', () => { expect(result).toBeInstanceOf(BlockHeader); }); + it('getBlockHeaderByArchive', async () => { + const result = await context.client.getBlockHeaderByArchive(Fr.random()); + expect(result).toBeInstanceOf(BlockHeader); + }); + + it('getBlockHeaderByHash', async () => { + const result = await context.client.getBlockHeaderByHash(Fr.random()); + expect(result).toBeInstanceOf(BlockHeader); + }); + it('getBlocks', async () => { const result = await context.client.getBlocks(1, 1); expect(result).toEqual([expect.any(L2Block)]); @@ -104,6 +114,22 @@ describe('ArchiverApiSchema', () => { expect(response[0].l1).toBeDefined(); }); + it('getPublishedBlockByArchive', async () => { + const result = await context.client.getPublishedBlockByArchive(Fr.random()); + expect(result).toBeDefined(); + expect(result!.block.constructor.name).toEqual('L2Block'); + expect(result!.attestations[0]).toBeInstanceOf(CommitteeAttestation); + expect(result!.l1).toBeDefined(); + }); + + it('getPublishedBlockByHash', async () => { + const result = await context.client.getPublishedBlockByHash(Fr.random()); + expect(result).toBeDefined(); + expect(result!.block.constructor.name).toEqual('L2Block'); + expect(result!.attestations[0]).toBeInstanceOf(CommitteeAttestation); + expect(result!.l1).toBeDefined(); + }); + it('getTxEffect', async () => { const result = await context.client.getTxEffect(TxHash.fromBuffer(Buffer.alloc(32, 1))); 
expect(result!.data).toBeInstanceOf(TxEffect); @@ -256,11 +282,19 @@ describe('ArchiverApiSchema', () => { const result = await context.client.isPendingChainInvalid(); expect(result).toBe(false); }); + + it('getGenesisValues', async () => { + const result = await context.client.getGenesisValues(); + expect(result).toEqual({ genesisArchiveRoot: expect.any(Fr) }); + }); }); class MockArchiver implements ArchiverApi { constructor(private artifact: ContractArtifact) {} + getGenesisValues(): Promise<{ genesisArchiveRoot: Fr }> { + return Promise.resolve({ genesisArchiveRoot: Fr.random() }); + } isPendingChainInvalid(): Promise { return Promise.resolve(false); } @@ -300,6 +334,26 @@ class MockArchiver implements ArchiverApi { }), ]; } + async getPublishedBlockByHash(_blockHash: Fr): Promise { + return PublishedL2Block.fromFields({ + block: await L2Block.random(1), + attestations: [CommitteeAttestation.random()], + l1: { blockHash: `0x`, blockNumber: 1n, timestamp: 0n }, + }); + } + async getPublishedBlockByArchive(_archive: Fr): Promise { + return PublishedL2Block.fromFields({ + block: await L2Block.random(1), + attestations: [CommitteeAttestation.random()], + l1: { blockHash: `0x`, blockNumber: 1n, timestamp: 0n }, + }); + } + getBlockHeaderByHash(_blockHash: Fr): Promise { + return Promise.resolve(BlockHeader.empty()); + } + getBlockHeaderByArchive(_archive: Fr): Promise { + return Promise.resolve(BlockHeader.empty()); + } async getTxEffect(_txHash: TxHash): Promise { expect(_txHash).toBeInstanceOf(TxHash); return { diff --git a/yarn-project/stdlib/src/interfaces/archiver.ts b/yarn-project/stdlib/src/interfaces/archiver.ts index fa36b76d60c0..e5ca51c73d36 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.ts @@ -83,6 +83,10 @@ export const ArchiverApiSchema: ApiSchemaFor = { .function() .args(schemas.Integer, schemas.Integer, optional(z.boolean())) .returns(z.array(PublishedL2Block.schema)), + 
getPublishedBlockByHash: z.function().args(schemas.Fr).returns(PublishedL2Block.schema.optional()), + getPublishedBlockByArchive: z.function().args(schemas.Fr).returns(PublishedL2Block.schema.optional()), + getBlockHeaderByHash: z.function().args(schemas.Fr).returns(BlockHeader.schema.optional()), + getBlockHeaderByArchive: z.function().args(schemas.Fr).returns(BlockHeader.schema.optional()), getTxEffect: z.function().args(TxHash.schema).returns(indexedTxSchema().optional()), getSettledTxReceipt: z.function().args(TxHash.schema).returns(TxReceipt.schema.optional()), getL2SlotNumber: z.function().args().returns(schemas.BigInt), @@ -110,6 +114,10 @@ export const ArchiverApiSchema: ApiSchemaFor = { getL1ToL2MessageIndex: z.function().args(schemas.Fr).returns(schemas.BigInt.optional()), getDebugFunctionName: z.function().args(schemas.AztecAddress, schemas.FunctionSelector).returns(optional(z.string())), getL1Constants: z.function().args().returns(L1RollupConstantsSchema), + getGenesisValues: z + .function() + .args() + .returns(z.object({ genesisArchiveRoot: schemas.Fr })), getL1Timestamp: z.function().args().returns(schemas.BigInt), syncImmediate: z.function().args().returns(z.void()), isPendingChainInvalid: z.function().args().returns(z.boolean()), diff --git a/yarn-project/stdlib/src/interfaces/aztec-node-admin.test.ts b/yarn-project/stdlib/src/interfaces/aztec-node-admin.test.ts index a33126a57de1..45d94007a223 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node-admin.test.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node-admin.test.ts @@ -5,7 +5,7 @@ import { type Offense, OffenseType, type SlashPayloadRound } from '../slashing/i import { type AztecNodeAdmin, AztecNodeAdminApiSchema } from './aztec-node-admin.js'; import type { SequencerConfig } from './configs.js'; import type { ProverConfig } from './prover-client.js'; -import type { ValidatorClientConfig } from './server.js'; +import type { ValidatorClientFullConfig } from './server.js'; import type 
{ SlasherConfig } from './slasher.js'; describe('AztecNodeAdminApiSchema', () => { @@ -126,7 +126,7 @@ class MockAztecNodeAdmin implements AztecNodeAdmin { ]); } getConfig(): Promise< - ValidatorClientConfig & SequencerConfig & ProverConfig & SlasherConfig & { maxTxPoolSize: number } + ValidatorClientFullConfig & SequencerConfig & ProverConfig & SlasherConfig & { maxTxPoolSize: number } > { return Promise.resolve({ realProofs: false, @@ -164,6 +164,7 @@ class MockAztecNodeAdmin implements AztecNodeAdmin { attestationPollingIntervalMs: 1000, validatorReexecute: true, validatorReexecuteDeadlineMs: 1000, + disableTransactions: false, }); } startSnapshotUpload(_location: string): Promise { diff --git a/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts b/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts index 3ce5fe13a5dc..eb8ddd322aea 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts @@ -9,7 +9,7 @@ import { type ArchiverSpecificConfig, ArchiverSpecificConfigSchema } from './arc import { type SequencerConfig, SequencerConfigSchema } from './configs.js'; import { type ProverConfig, ProverConfigSchema } from './prover-client.js'; import { type SlasherConfig, SlasherConfigSchema } from './slasher.js'; -import { ValidatorClientConfigSchema, type ValidatorClientFullConfig } from './validator.js'; +import { type ValidatorClientFullConfig, ValidatorClientFullConfigSchema } from './validator.js'; /** * Aztec node admin API. 
@@ -62,7 +62,7 @@ export type AztecNodeAdminConfig = ValidatorClientFullConfig & export const AztecNodeAdminConfigSchema = SequencerConfigSchema.merge(ProverConfigSchema) .merge(SlasherConfigSchema) - .merge(ValidatorClientConfigSchema) + .merge(ValidatorClientFullConfigSchema) .merge( ArchiverSpecificConfigSchema.pick({ archiverPollingIntervalMS: true, diff --git a/yarn-project/stdlib/src/interfaces/aztec-node.test.ts b/yarn-project/stdlib/src/interfaces/aztec-node.test.ts index 17ed537238e8..eeba21760f49 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node.test.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node.test.ts @@ -174,6 +174,26 @@ describe('AztecNodeApiSchema', () => { expect(response).toBeInstanceOf(L2Block); }); + it('getBlockByHash', async () => { + const response = await context.client.getBlockByHash(Fr.random()); + expect(response).toBeInstanceOf(L2Block); + }); + + it('getBlockByArchive', async () => { + const response = await context.client.getBlockByArchive(Fr.random()); + expect(response).toBeInstanceOf(L2Block); + }); + + it('getBlockHeaderByHash', async () => { + const response = await context.client.getBlockHeaderByHash(Fr.random()); + expect(response).toBeInstanceOf(BlockHeader); + }); + + it('getBlockHeaderByArchive', async () => { + const response = await context.client.getBlockHeaderByArchive(Fr.random()); + expect(response).toBeInstanceOf(BlockHeader); + }); + it('getCurrentBaseFees', async () => { const response = await context.client.getCurrentBaseFees(); expect(response).toEqual(GasFees.empty()); @@ -585,6 +605,18 @@ class MockAztecNode implements AztecNode { getBlock(number: number): Promise { return Promise.resolve(L2Block.random(number)); } + getBlockByHash(_blockHash: Fr): Promise { + return Promise.resolve(L2Block.random(1)); + } + getBlockByArchive(_archive: Fr): Promise { + return Promise.resolve(L2Block.random(1)); + } + getBlockHeaderByHash(_blockHash: Fr): Promise { + return Promise.resolve(BlockHeader.empty()); + 
} + getBlockHeaderByArchive(_archive: Fr): Promise { + return Promise.resolve(BlockHeader.empty()); + } getCurrentBaseFees(): Promise { return Promise.resolve(GasFees.empty()); } diff --git a/yarn-project/stdlib/src/interfaces/aztec-node.ts b/yarn-project/stdlib/src/interfaces/aztec-node.ts index 11b5b2bee02f..4a1fbd94cf01 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node.ts @@ -224,6 +224,20 @@ export interface AztecNode */ getBlock(number: L2BlockNumber): Promise; + /** + * Get a block specified by its hash. + * @param blockHash - The block hash being requested. + * @returns The requested block. + */ + getBlockByHash(blockHash: Fr): Promise; + + /** + * Get a block specified by its archive root. + * @param archive - The archive root being requested. + * @returns The requested block. + */ + getBlockByArchive(archive: Fr): Promise; + /** * Method to fetch the latest block number synchronized by the node. * @returns The block number. @@ -399,6 +413,20 @@ export interface AztecNode */ getBlockHeader(blockNumber?: L2BlockNumber): Promise; + /** + * Get a block header specified by its hash. + * @param blockHash - The block hash being requested. + * @returns The requested block header. + */ + getBlockHeaderByHash(blockHash: Fr): Promise; + + /** + * Get a block header specified by its archive root. + * @param archive - The archive root being requested. + * @returns The requested block header. + */ + getBlockHeaderByArchive(archive: Fr): Promise; + /** Returns stats for validators if enabled. 
*/ getValidatorsStats(): Promise; @@ -516,6 +544,10 @@ export const AztecNodeApiSchema: ApiSchemaFor = { getBlock: z.function().args(L2BlockNumberSchema).returns(L2Block.schema.optional()), + getBlockByHash: z.function().args(schemas.Fr).returns(L2Block.schema.optional()), + + getBlockByArchive: z.function().args(schemas.Fr).returns(L2Block.schema.optional()), + getBlockNumber: z.function().returns(z.number()), getProvenBlockNumber: z.function().returns(z.number()), @@ -589,6 +621,10 @@ export const AztecNodeApiSchema: ApiSchemaFor = { getBlockHeader: z.function().args(optional(L2BlockNumberSchema)).returns(BlockHeader.schema.optional()), + getBlockHeaderByHash: z.function().args(schemas.Fr).returns(BlockHeader.schema.optional()), + + getBlockHeaderByArchive: z.function().args(schemas.Fr).returns(BlockHeader.schema.optional()), + getValidatorsStats: z.function().returns(ValidatorsStatsSchema), getValidatorStats: z diff --git a/yarn-project/stdlib/src/interfaces/tx_provider.ts b/yarn-project/stdlib/src/interfaces/tx_provider.ts index 1cbde3a3bb1f..4113b69cae5f 100644 --- a/yarn-project/stdlib/src/interfaces/tx_provider.ts +++ b/yarn-project/stdlib/src/interfaces/tx_provider.ts @@ -9,6 +9,7 @@ export interface ITxProvider { getTxsForBlockProposal( blockProposal: BlockProposal, + blockNumber: number, opts: { pinnedPeer: PeerId | undefined; deadline: Date }, ): Promise<{ txs: Tx[]; missingTxs: TxHash[] }>; diff --git a/yarn-project/stdlib/src/interfaces/validator.ts b/yarn-project/stdlib/src/interfaces/validator.ts index 3997dfdccc52..283be4477d50 100644 --- a/yarn-project/stdlib/src/interfaces/validator.ts +++ b/yarn-project/stdlib/src/interfaces/validator.ts @@ -11,6 +11,7 @@ import type { PeerId } from '@libp2p/interface'; import { z } from 'zod'; import type { CommitteeAttestationsAndSigners } from '../block/index.js'; +import { AllowedElementSchema } from './allowed_element.js'; /** * Validator client configuration @@ -43,7 +44,13 @@ export interface 
ValidatorClientConfig { export type ValidatorClientFullConfig = ValidatorClientConfig & Pick & - Pick; + Pick & { + /** + * Whether transactions are disabled for this node + * @remarks This should match the property in P2PConfig. It's not picked from there to avoid circular dependencies. + */ + disableTransactions?: boolean; + }; export const ValidatorClientConfigSchema = z.object({ validatorAddresses: z.array(schemas.EthAddress).optional(), @@ -55,6 +62,12 @@ export const ValidatorClientConfigSchema = z.object({ alwaysReexecuteBlockProposals: z.boolean().optional(), }) satisfies ZodFor>; +export const ValidatorClientFullConfigSchema = ValidatorClientConfigSchema.extend({ + txPublicSetupAllowList: z.array(AllowedElementSchema).optional(), + slashBroadcastedInvalidBlockPenalty: schemas.BigInt, + disableTransactions: z.boolean().optional(), +}) satisfies ZodFor>; + export interface Validator { start(): Promise; updateConfig(config: Partial): void; diff --git a/yarn-project/stdlib/src/p2p/block_attestation.ts b/yarn-project/stdlib/src/p2p/block_attestation.ts index 94c040597f89..3080b1cc91ac 100644 --- a/yarn-project/stdlib/src/p2p/block_attestation.ts +++ b/yarn-project/stdlib/src/p2p/block_attestation.ts @@ -1,5 +1,5 @@ import { Buffer32 } from '@aztec/foundation/buffer'; -import { keccak256, recoverAddress } from '@aztec/foundation/crypto'; +import { keccak256, tryRecoverAddress } from '@aztec/foundation/crypto'; import type { EthAddress } from '@aztec/foundation/eth-address'; import { Signature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; @@ -7,8 +7,7 @@ import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { z } from 'zod'; -import { type ZodFor, schemas } from '../schemas/index.js'; -import type { UInt32 } from '../types/index.js'; +import type { ZodFor } from '../schemas/index.js'; import { ConsensusPayload } from './consensus_payload.js'; import { Gossipable } from './gossipable.js'; 
import { SignatureDomainSeparator, getHashedSignaturePayloadEthSignedMessage } from './signature_utils.js'; @@ -33,9 +32,6 @@ export class BlockAttestation extends Gossipable { private proposer: EthAddress | undefined; constructor( - /** The block number of the attestation. */ - public readonly blockNumber: UInt32, - /** The payload of the message, and what the signature is over */ public readonly payload: ConsensusPayload, @@ -51,12 +47,11 @@ export class BlockAttestation extends Gossipable { static get schema(): ZodFor { return z .object({ - blockNumber: schemas.UInt32, payload: ConsensusPayload.schema, signature: Signature.schema, proposerSignature: Signature.schema, }) - .transform(obj => new BlockAttestation(obj.blockNumber, obj.payload, obj.signature, obj.proposerSignature)); + .transform(obj => new BlockAttestation(obj.payload, obj.signature, obj.proposerSignature)); } override generateP2PMessageIdentifier(): Promise { @@ -73,41 +68,29 @@ export class BlockAttestation extends Gossipable { /** * Lazily evaluate and cache the signer of the attestation - * @returns The signer of the attestation + * @returns The signer of the attestation, or undefined if signature recovery fails */ - getSender(): EthAddress { + getSender(): EthAddress | undefined { if (!this.sender) { // Recover the sender from the attestation const hashed = getHashedSignaturePayloadEthSignedMessage(this.payload, SignatureDomainSeparator.blockAttestation); // Cache the sender for later use - this.sender = recoverAddress(hashed, this.signature); + this.sender = tryRecoverAddress(hashed, this.signature); } return this.sender; } - /** - * Tries to get the sender of the attestation - * @returns The sender of the attestation or undefined if it fails during recovery - */ - tryGetSender(): EthAddress | undefined { - try { - return this.getSender(); - } catch { - return undefined; - } - } - /** * Lazily evaluate and cache the proposer of the block * @returns The proposer of the block */ - getProposer(): 
EthAddress { + getProposer(): EthAddress | undefined { if (!this.proposer) { // Recover the proposer from the proposal signature const hashed = getHashedSignaturePayloadEthSignedMessage(this.payload, SignatureDomainSeparator.blockProposal); // Cache the proposer for later use - this.proposer = recoverAddress(hashed, this.proposerSignature); + this.proposer = tryRecoverAddress(hashed, this.proposerSignature); } return this.proposer; @@ -118,13 +101,12 @@ export class BlockAttestation extends Gossipable { } toBuffer(): Buffer { - return serializeToBuffer([this.blockNumber, this.payload, this.signature, this.proposerSignature]); + return serializeToBuffer([this.payload, this.signature, this.proposerSignature]); } static fromBuffer(buf: Buffer | BufferReader): BlockAttestation { const reader = BufferReader.asReader(buf); return new BlockAttestation( - reader.readNumber(), reader.readObject(ConsensusPayload), reader.readObject(Signature), reader.readObject(Signature), @@ -132,25 +114,19 @@ export class BlockAttestation extends Gossipable { } static empty(): BlockAttestation { - return new BlockAttestation(0, ConsensusPayload.empty(), Signature.empty(), Signature.empty()); + return new BlockAttestation(ConsensusPayload.empty(), Signature.empty(), Signature.empty()); } static random(): BlockAttestation { - return new BlockAttestation( - Math.floor(Math.random() * 1000) + 1, - ConsensusPayload.random(), - Signature.random(), - Signature.random(), - ); + return new BlockAttestation(ConsensusPayload.random(), Signature.random(), Signature.random()); } getSize(): number { - return 4 /* blockNumber */ + this.payload.getSize() + this.signature.getSize() + this.proposerSignature.getSize(); + return this.payload.getSize() + this.signature.getSize() + this.proposerSignature.getSize(); } toInspect() { return { - blockNumber: this.blockNumber, payload: this.payload.toInspect(), signature: this.signature.toString(), proposerSignature: this.proposerSignature.toString(), diff --git 
a/yarn-project/stdlib/src/p2p/block_proposal.test.ts b/yarn-project/stdlib/src/p2p/block_proposal.test.ts index 062f105787e5..2ef091b25697 100644 --- a/yarn-project/stdlib/src/p2p/block_proposal.test.ts +++ b/yarn-project/stdlib/src/p2p/block_proposal.test.ts @@ -11,17 +11,16 @@ import { ConsensusPayload } from './consensus_payload.js'; class BackwardsCompatibleBlockProposal extends BlockProposal { constructor(payload: ConsensusPayload, signature: Signature) { - super(1, payload, signature, [], undefined); + super(payload, signature, [], undefined); } oldToBuffer(): Buffer { - return serializeToBuffer([this.blockNumber, this.payload, this.signature, 0, []]); + return serializeToBuffer([this.payload, this.signature, 0, []]); } static oldFromBuffer(buf: Buffer | BufferReader): BlockProposal { const reader = BufferReader.asReader(buf); return new BlockProposal( - reader.readNumber(), reader.readObject(ConsensusPayload), reader.readObject(Signature), reader.readArray(0, TxHash), diff --git a/yarn-project/stdlib/src/p2p/block_proposal.ts b/yarn-project/stdlib/src/p2p/block_proposal.ts index aae40dcaa3d3..c619695cbe6b 100644 --- a/yarn-project/stdlib/src/p2p/block_proposal.ts +++ b/yarn-project/stdlib/src/p2p/block_proposal.ts @@ -1,5 +1,5 @@ import { Buffer32 } from '@aztec/foundation/buffer'; -import { keccak256, recoverAddress } from '@aztec/foundation/crypto'; +import { keccak256, tryRecoverAddress } from '@aztec/foundation/crypto'; import type { EthAddress } from '@aztec/foundation/eth-address'; import { Signature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; @@ -8,7 +8,6 @@ import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import type { L2BlockInfo } from '../block/l2_block_info.js'; import { TxHash } from '../tx/index.js'; import { Tx } from '../tx/tx.js'; -import type { UInt32 } from '../types/index.js'; import { ConsensusPayload } from './consensus_payload.js'; import { Gossipable } from 
'./gossipable.js'; import { @@ -40,9 +39,6 @@ export class BlockProposal extends Gossipable { private sender: EthAddress | undefined; constructor( - /** The number of the block */ - public readonly blockNumber: UInt32, - /** The payload of the message, and what the signature is over */ public readonly payload: ConsensusPayload, @@ -71,9 +67,8 @@ export class BlockProposal extends Gossipable { return this.payload.header.slotNumber; } - toBlockInfo(): L2BlockInfo { + toBlockInfo(): Omit { return { - blockNumber: this.blockNumber, slotNumber: this.slotNumber.toNumber(), lastArchive: this.payload.header.lastArchiveRoot, timestamp: this.payload.header.timestamp, @@ -83,7 +78,6 @@ export class BlockProposal extends Gossipable { } static async createProposalFromSigner( - blockNumber: UInt32, payload: ConsensusPayload, txHashes: TxHash[], // Note(md): Provided separately to tx hashes such that this function can be optional @@ -93,17 +87,18 @@ export class BlockProposal extends Gossipable { const hashed = getHashedSignaturePayload(payload, SignatureDomainSeparator.blockProposal); const sig = await payloadSigner(hashed); - return new BlockProposal(blockNumber, payload, sig, txHashes, txs); + return new BlockProposal(payload, sig, txHashes, txs); } /**Get Sender * Lazily evaluate the sender of the proposal; result is cached + * @returns The sender address, or undefined if signature recovery fails */ - getSender() { + getSender(): EthAddress | undefined { if (!this.sender) { const hashed = getHashedSignaturePayloadEthSignedMessage(this.payload, SignatureDomainSeparator.blockProposal); // Cache the sender for later use - this.sender = recoverAddress(hashed, this.signature); + this.sender = tryRecoverAddress(hashed, this.signature); } return this.sender; @@ -114,7 +109,7 @@ export class BlockProposal extends Gossipable { } toBuffer(): Buffer { - const buffer: any[] = [this.blockNumber, this.payload, this.signature, this.txHashes.length, this.txHashes]; + const buffer: any[] = 
[this.payload, this.signature, this.txHashes.length, this.txHashes]; if (this.txs) { buffer.push(this.txs.length); buffer.push(this.txs); @@ -125,22 +120,20 @@ export class BlockProposal extends Gossipable { static fromBuffer(buf: Buffer | BufferReader): BlockProposal { const reader = BufferReader.asReader(buf); - const blockNumber = reader.readNumber(); const payload = reader.readObject(ConsensusPayload); const sig = reader.readObject(Signature); const txHashes = reader.readArray(reader.readNumber(), TxHash); if (!reader.isEmpty()) { const txs = reader.readArray(reader.readNumber(), Tx); - return new BlockProposal(blockNumber, payload, sig, txHashes, txs); + return new BlockProposal(payload, sig, txHashes, txs); } - return new BlockProposal(blockNumber, payload, sig, txHashes); + return new BlockProposal(payload, sig, txHashes); } getSize(): number { return ( - 4 /* blockNumber */ + this.payload.getSize() + this.signature.getSize() + 4 /* txHashes.length */ + diff --git a/yarn-project/stdlib/src/tests/mocks.ts b/yarn-project/stdlib/src/tests/mocks.ts index 793d57697eed..14acf75e23a5 100644 --- a/yarn-project/stdlib/src/tests/mocks.ts +++ b/yarn-project/stdlib/src/tests/mocks.ts @@ -288,12 +288,9 @@ export const makeAndSignCommitteeAttestationsAndSigners = ( }; export const makeBlockProposal = (options?: MakeConsensusPayloadOptions): BlockProposal => { - const { blockNumber, payload, signature } = makeAndSignConsensusPayload( - SignatureDomainSeparator.blockProposal, - options, - ); + const { payload, signature } = makeAndSignConsensusPayload(SignatureDomainSeparator.blockProposal, options); const txHashes = options?.txHashes ?? [0, 1, 2, 3, 4, 5].map(() => TxHash.random()); - return new BlockProposal(blockNumber, payload, signature, txHashes, options?.txs ?? []); + return new BlockProposal(payload, signature, txHashes, options?.txs ?? 
[]); }; // TODO(https://github.com/AztecProtocol/aztec-packages/issues/8028) @@ -321,7 +318,7 @@ export const makeBlockAttestation = (options?: MakeConsensusPayloadOptions): Blo const proposalHash = getHashedSignaturePayloadEthSignedMessage(payload, SignatureDomainSeparator.blockProposal); const proposerSignature = proposerSigner.sign(proposalHash); - return new BlockAttestation(header.globalVariables.blockNumber, payload, attestationSignature, proposerSignature); + return new BlockAttestation(payload, attestationSignature, proposerSignature); }; export const makeBlockAttestationFromBlock = ( @@ -349,7 +346,7 @@ export const makeBlockAttestationFromBlock = ( const proposalSignerToUse = proposerSigner ?? Secp256k1Signer.random(); const proposerSignature = proposalSignerToUse.sign(proposalHash); - return new BlockAttestation(header.globalVariables.blockNumber, payload, attestationSignature, proposerSignature); + return new BlockAttestation(payload, attestationSignature, proposerSignature); }; export async function randomPublishedL2Block( diff --git a/yarn-project/stdlib/src/zkpassport/index.ts b/yarn-project/stdlib/src/zkpassport/index.ts index 18530a1e9ee3..bb44bf0ed2a9 100644 --- a/yarn-project/stdlib/src/zkpassport/index.ts +++ b/yarn-project/stdlib/src/zkpassport/index.ts @@ -5,15 +5,21 @@ import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { withoutHexPrefix } from '@aztec/foundation/string'; export type ViemZkPassportProofParams = { - vkeyHash: `0x${string}`; - proof: `0x${string}`; - publicInputs: `0x${string}`[]; - committedInputs: `0x${string}`; - committedInputCounts: bigint[]; - validityPeriodInSeconds: bigint; - domain: string; - scope: string; - devMode: boolean; + proofVerificationData: { + vkeyHash: `0x${string}`; + proof: `0x${string}`; + publicInputs: `0x${string}`[]; + }; + commitments: { + committedInputs: `0x${string}`; + committedInputCounts: bigint[]; + }; + serviceConfig: { + validityPeriodInSeconds: bigint; + 
domain: string; + scope: string; + devMode: boolean; + }; }; // NOTE: Must match the ZkPassportProofParams struct in the zkpassport verifier contract @@ -64,7 +70,7 @@ export class ZkPassportProofParams { publicInputs, committedInputs, committedInputCounts, - BigInt(100 * 60 * 60 * 24), + BigInt(7 * 24 * 60 * 60), // 7 days 'sequencer.alpha-testnet.aztec.network', 'personhood', ); @@ -87,29 +93,35 @@ export class ZkPassportProofParams { static fromViem(params: ViemZkPassportProofParams) { return new ZkPassportProofParams( - params.devMode, - Buffer32.fromString(params.vkeyHash), - Buffer.from(withoutHexPrefix(params.proof), 'hex'), - params.publicInputs.map(input => Fr.fromString(input)), - Buffer.from(withoutHexPrefix(params.committedInputs), 'hex'), - params.committedInputCounts, - params.validityPeriodInSeconds, - params.domain, - params.scope, + params.serviceConfig.devMode, + Buffer32.fromString(params.proofVerificationData.vkeyHash), + Buffer.from(withoutHexPrefix(params.proofVerificationData.proof), 'hex'), + params.proofVerificationData.publicInputs.map(input => Fr.fromString(input)), + Buffer.from(withoutHexPrefix(params.commitments.committedInputs), 'hex'), + params.commitments.committedInputCounts, + params.serviceConfig.validityPeriodInSeconds, + params.serviceConfig.domain, + params.serviceConfig.scope, ); } toViem(): ViemZkPassportProofParams { return { - devMode: this.devMode, - vkeyHash: this.vkeyHash.toString(), - proof: `0x${this.proof.toString('hex')}`, - publicInputs: this.publicInputs.map(input => input.toString()), - committedInputs: `0x${this.committedInputs.toString('hex')}`, - committedInputCounts: this.committedInputCounts, - validityPeriodInSeconds: this.validityPeriodInSeconds, - domain: this.domain, - scope: this.scope, + serviceConfig: { + devMode: this.devMode, + validityPeriodInSeconds: this.validityPeriodInSeconds, + domain: this.domain, + scope: this.scope, + }, + proofVerificationData: { + vkeyHash: this.vkeyHash.toString(), + 
proof: `0x${this.proof.toString('hex')}`, + publicInputs: this.publicInputs.map(input => input.toString()), + }, + commitments: { + committedInputs: `0x${this.committedInputs.toString('hex')}`, + committedInputCounts: this.committedInputCounts, + }, }; } } diff --git a/yarn-project/txe/src/state_machine/archiver.ts b/yarn-project/txe/src/state_machine/archiver.ts index 4f2ca9523537..1430f32ab99d 100644 --- a/yarn-project/txe/src/state_machine/archiver.ts +++ b/yarn-project/txe/src/state_machine/archiver.ts @@ -1,5 +1,7 @@ import { ArchiverStoreHelper, KVArchiverDataStore, type PublishedL2Block } from '@aztec/archiver'; +import { GENESIS_ARCHIVE_ROOT } from '@aztec/constants'; import type { EthAddress } from '@aztec/foundation/eth-address'; +import { Fr } from '@aztec/foundation/fields'; import type { AztecAsyncKVStore } from '@aztec/kv-store'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { L2Block, L2BlockSource, L2Tips, ValidateBlockResult } from '@aztec/stdlib/block'; @@ -115,6 +117,10 @@ export class TXEArchiver extends ArchiverStoreHelper implements L2BlockSource { throw new Error('TXE Archiver does not implement "getL2Constants"'); } + public getGenesisValues(): Promise<{ genesisArchiveRoot: Fr }> { + return Promise.resolve({ genesisArchiveRoot: new Fr(GENESIS_ARCHIVE_ROOT) }); + } + public syncImmediate(): Promise { throw new Error('TXE Archiver does not implement "syncImmediate"'); } diff --git a/yarn-project/validator-client/src/block_proposal_handler.ts b/yarn-project/validator-client/src/block_proposal_handler.ts index b53d96ab4ef7..51090fa2f800 100644 --- a/yarn-project/validator-client/src/block_proposal_handler.ts +++ b/yarn-project/validator-client/src/block_proposal_handler.ts @@ -1,4 +1,5 @@ import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; +import { TimeoutError } from '@aztec/foundation/error'; import { Fr } from '@aztec/foundation/fields'; import { createLogger } from '@aztec/foundation/log'; import { 
retryUntil } from '@aztec/foundation/retry'; @@ -7,12 +8,12 @@ import type { P2P, PeerId } from '@aztec/p2p'; import { TxProvider } from '@aztec/p2p'; import { BlockProposalValidator } from '@aztec/p2p/msg_validators'; import { computeInHashFromL1ToL2Messages } from '@aztec/prover-client/helpers'; -import type { L2BlockSource } from '@aztec/stdlib/block'; +import type { L2Block, L2BlockSource } from '@aztec/stdlib/block'; import { getTimestampForSlot } from '@aztec/stdlib/epoch-helpers'; import type { IFullNodeBlockBuilder, ValidatorClientFullConfig } from '@aztec/stdlib/interfaces/server'; import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; import { type BlockProposal, ConsensusPayload } from '@aztec/stdlib/p2p'; -import { type FailedTx, GlobalVariables, type Tx } from '@aztec/stdlib/tx'; +import { BlockHeader, type FailedTx, GlobalVariables, type Tx } from '@aztec/stdlib/tx'; import { ReExFailedTxsError, ReExStateMismatchError, @@ -26,7 +27,7 @@ import type { ValidatorMetrics } from './metrics.js'; export type BlockProposalValidationFailureReason = | 'invalid_proposal' | 'parent_block_not_found' - | 'parent_block_does_not_match' + | 'parent_block_wrong_slot' | 'in_hash_mismatch' | 'block_number_already_exists' | 'txs_not_available' @@ -35,16 +36,27 @@ export type BlockProposalValidationFailureReason = | 'timeout' | 'unknown_error'; -export interface BlockProposalValidationResult { - isValid: boolean; - reason?: BlockProposalValidationFailureReason; - reexecutionResult?: { - block: any; - failedTxs: FailedTx[]; - reexecutionTimeMs: number; - totalManaUsed: number; - }; -} +type ReexecuteTransactionsResult = { + block: L2Block; + failedTxs: FailedTx[]; + reexecutionTimeMs: number; + totalManaUsed: number; +}; + +export type BlockProposalValidationSuccessResult = { + isValid: true; + blockNumber: number; + reexecutionResult?: ReexecuteTransactionsResult; +}; + +export type BlockProposalValidationFailureResult = { + isValid: false; + reason: 
BlockProposalValidationFailureReason; + blockNumber?: number; + reexecutionResult?: ReexecuteTransactionsResult; +}; + +export type BlockProposalValidationResult = BlockProposalValidationSuccessResult | BlockProposalValidationFailureResult; export class BlockProposalHandler { public readonly tracer: Tracer; @@ -68,16 +80,16 @@ export class BlockProposalHandler { const handler = async (proposal: BlockProposal, proposalSender: PeerId) => { try { const result = await this.handleBlockProposal(proposal, proposalSender, true); - if (result.isValid && result.reexecutionResult) { + if (result.isValid) { this.log.info(`Non-validator reexecution completed for slot ${proposal.slotNumber.toBigInt()}`, { - blockNumber: proposal.blockNumber, - reexecutionTimeMs: result.reexecutionResult.reexecutionTimeMs, - totalManaUsed: result.reexecutionResult.totalManaUsed, - numTxs: result.reexecutionResult.block?.body?.txEffects?.length ?? 0, + blockNumber: result.blockNumber, + reexecutionTimeMs: result.reexecutionResult?.reexecutionTimeMs, + totalManaUsed: result.reexecutionResult?.totalManaUsed, + numTxs: result.reexecutionResult?.block?.body?.txEffects?.length ?? 
0, }); } else { this.log.warn(`Non-validator reexecution failed for slot ${proposal.slotNumber.toBigInt()}`, { - blockNumber: proposal.blockNumber, + blockNumber: result.blockNumber, reason: result.reason, }); } @@ -97,8 +109,14 @@ export class BlockProposalHandler { shouldReexecute: boolean, ): Promise { const slotNumber = proposal.slotNumber.toBigInt(); - const blockNumber = proposal.blockNumber; const proposer = proposal.getSender(); + const config = this.blockBuilder.getConfig(); + + // Reject proposals with invalid signatures + if (!proposer) { + this.log.warn(`Received proposal with invalid signature for slot ${slotNumber}`); + return { isValid: false, reason: 'invalid_proposal' }; + } const proposalInfo = { ...proposal.toBlockInfo(), proposer: proposer.toString() }; this.log.info(`Processing proposal for slot ${slotNumber}`, { @@ -114,52 +132,40 @@ export class BlockProposalHandler { return { isValid: false, reason: 'invalid_proposal' }; } - // Collect txs from the proposal. We start doing this as early as possible, - // and we do it even if we don't plan to re-execute the txs, so that we have them - // if another node needs them. - const config = this.blockBuilder.getConfig(); - const { txs, missingTxs } = await this.txProvider.getTxsForBlockProposal(proposal, { - pinnedPeer: proposalSender, - deadline: this.getReexecutionDeadline(proposal, config), - }); - // Check that the parent proposal is a block we know, otherwise reexecution would fail - if (blockNumber > INITIAL_L2_BLOCK_NUM) { - const deadline = this.getReexecutionDeadline(proposal, config); - const currentTime = this.dateProvider.now(); - const timeoutDurationMs = deadline.getTime() - currentTime; - const parentBlock = - timeoutDurationMs <= 0 - ? 
undefined - : await retryUntil( - async () => { - const block = await this.blockSource.getBlock(blockNumber - 1); - if (block) { - return block; - } - await this.blockSource.syncImmediate(); - return await this.blockSource.getBlock(blockNumber - 1); - }, - 'Force Archiver Sync', - timeoutDurationMs / 1000, - 0.5, - ); + const parentBlockHeader = await this.getParentBlock(proposal); + if (parentBlockHeader === undefined) { + this.log.warn(`Parent block for proposal not found, skipping processing`, proposalInfo); + return { isValid: false, reason: 'parent_block_not_found' }; + } - if (parentBlock === undefined) { - this.log.warn(`Parent block for ${blockNumber} not found, skipping processing`, proposalInfo); - return { isValid: false, reason: 'parent_block_not_found' }; - } + // Check that the parent block's slot is less than the proposal's slot (should not happen, but we check anyway) + if (parentBlockHeader !== 'genesis' && parentBlockHeader.getSlot() >= slotNumber) { + this.log.warn(`Parent block slot is greater than or equal to proposal slot, skipping processing`, { + parentBlockSlot: parentBlockHeader.getSlot().toString(), + proposalSlot: slotNumber.toString(), + ...proposalInfo, + }); + return { isValid: false, reason: 'parent_block_wrong_slot' }; + } - if (!proposal.payload.header.lastArchiveRoot.equals(parentBlock.archive.root)) { - this.log.warn(`Parent block archive root for proposal does not match, skipping processing`, { - proposalLastArchiveRoot: proposal.payload.header.lastArchiveRoot.toString(), - parentBlockArchiveRoot: parentBlock.archive.root.toString(), - ...proposalInfo, - }); - return { isValid: false, reason: 'parent_block_does_not_match' }; - } + // Compute the block number based on the parent block + const blockNumber = parentBlockHeader === 'genesis' ? 
INITIAL_L2_BLOCK_NUM : parentBlockHeader.getBlockNumber() + 1; + + // Check that this block number does not exist already + const existingBlock = await this.blockSource.getBlockHeader(blockNumber); + if (existingBlock) { + this.log.warn(`Block number ${blockNumber} already exists, skipping processing`, proposalInfo); + return { isValid: false, blockNumber, reason: 'block_number_already_exists' }; } + // Collect txs from the proposal. We start doing this as early as possible, + // and we do it even if we don't plan to re-execute the txs, so that we have them if another node needs them. + const { txs, missingTxs } = await this.txProvider.getTxsForBlockProposal(proposal, blockNumber, { + pinnedPeer: proposalSender, + deadline: this.getReexecutionDeadline(slotNumber, config), + }); + // Check that I have the same set of l1ToL2Messages as the proposal const l1ToL2Messages = await this.l1ToL2MessageSource.getL1ToL2Messages(blockNumber); const computedInHash = await computeInHashFromL1ToL2Messages(l1ToL2Messages); @@ -170,20 +176,13 @@ export class BlockProposalHandler { computedInHash: computedInHash.toString(), ...proposalInfo, }); - return { isValid: false, reason: 'in_hash_mismatch' }; - } - - // Check that this block number does not exist already - const existingBlock = await this.blockSource.getBlockHeader(blockNumber); - if (existingBlock) { - this.log.warn(`Block number ${blockNumber} already exists, skipping processing`, proposalInfo); - return { isValid: false, reason: 'block_number_already_exists' }; + return { isValid: false, blockNumber, reason: 'in_hash_mismatch' }; } // Check that all of the transactions in the proposal are available if (missingTxs.length > 0) { this.log.warn(`Missing ${missingTxs.length} txs to process proposal`, { ...proposalInfo, missingTxs }); - return { isValid: false, reason: 'txs_not_available' }; + return { isValid: false, blockNumber, reason: 'txs_not_available' }; } // Try re-executing the transactions in the proposal if needed @@ 
-191,23 +190,57 @@ export class BlockProposalHandler { if (shouldReexecute) { try { this.log.verbose(`Re-executing transactions in the proposal`, proposalInfo); - reexecutionResult = await this.reexecuteTransactions(proposal, txs, l1ToL2Messages); + reexecutionResult = await this.reexecuteTransactions(proposal, blockNumber, txs, l1ToL2Messages); } catch (error) { this.log.error(`Error reexecuting txs while processing block proposal`, error, proposalInfo); const reason = this.getReexecuteFailureReason(error); - return { isValid: false, reason, reexecutionResult }; + return { isValid: false, blockNumber, reason, reexecutionResult }; } } this.log.info(`Successfully processed proposal for slot ${slotNumber}`, proposalInfo); - return { isValid: true, reexecutionResult }; + return { isValid: true, blockNumber, reexecutionResult }; } - private getReexecutionDeadline( - proposal: BlockProposal, - config: { l1GenesisTime: bigint; slotDuration: number }, - ): Date { - const nextSlotTimestampSeconds = Number(getTimestampForSlot(proposal.slotNumber.toBigInt() + 1n, config)); + private async getParentBlock(proposal: BlockProposal): Promise<'genesis' | BlockHeader | undefined> { + const parentArchive = proposal.payload.header.lastArchiveRoot; + const slot = proposal.slotNumber.toBigInt(); + const config = this.blockBuilder.getConfig(); + const { genesisArchiveRoot } = await this.blockSource.getGenesisValues(); + + if (parentArchive.equals(genesisArchiveRoot)) { + return 'genesis'; + } + + const deadline = this.getReexecutionDeadline(slot, config); + const currentTime = this.dateProvider.now(); + const timeoutDurationMs = deadline.getTime() - currentTime; + + try { + return ( + (await this.blockSource.getBlockHeaderByArchive(parentArchive)) ?? + (timeoutDurationMs <= 0 + ? 
undefined + : await retryUntil( + () => + this.blockSource.syncImmediate().then(() => this.blockSource.getBlockHeaderByArchive(parentArchive)), + 'force archiver sync', + timeoutDurationMs / 1000, + 0.5, + )) + ); + } catch (err) { + if (err instanceof TimeoutError) { + this.log.debug(`Timed out getting parent block by archive root`, { parentArchive }); + } else { + this.log.error('Error getting parent block by archive root', err, { parentArchive }); + } + return undefined; + } + } + + private getReexecutionDeadline(slot: bigint, config: { l1GenesisTime: bigint; slotDuration: number }): Date { + const nextSlotTimestampSeconds = Number(getTimestampForSlot(slot + 1n, config)); const msNeededForPropagationAndPublishing = this.config.validatorReexecuteDeadlineMs; return new Date(nextSlotTimestampSeconds * 1000 - msNeededForPropagationAndPublishing); } @@ -219,21 +252,17 @@ export class BlockProposalHandler { return 'failed_txs'; } else if (err instanceof ReExTimeoutError) { return 'timeout'; - } else if (err instanceof Error) { + } else { return 'unknown_error'; } } async reexecuteTransactions( proposal: BlockProposal, + blockNumber: number, txs: Tx[], l1ToL2Messages: Fr[], - ): Promise<{ - block: any; - failedTxs: FailedTx[]; - reexecutionTimeMs: number; - totalManaUsed: number; - }> { + ): Promise { const { header } = proposal.payload; const { txHashes } = proposal; @@ -254,14 +283,14 @@ export class BlockProposalHandler { coinbase: proposal.payload.header.coinbase, // set arbitrarily by the proposer feeRecipient: proposal.payload.header.feeRecipient, // set arbitrarily by the proposer gasFees: proposal.payload.header.gasFees, // validated by the rollup contract - blockNumber: proposal.blockNumber, // checked blockNumber-1 exists in archiver but blockNumber doesnt + blockNumber, // computed from the parent block and checked it does not exist in archiver timestamp: header.timestamp, // checked in the rollup contract against the slot number chainId: new 
Fr(config.l1ChainId), version: new Fr(config.rollupVersion), }); const { block, failedTxs } = await this.blockBuilder.buildBlock(txs, l1ToL2Messages, globalVariables, { - deadline: this.getReexecutionDeadline(proposal, config), + deadline: this.getReexecutionDeadline(proposal.payload.header.slotNumber.toBigInt(), config), }); const numFailedTxs = failedTxs.length; diff --git a/yarn-project/validator-client/src/duties/validation_service.test.ts b/yarn-project/validator-client/src/duties/validation_service.test.ts index f28d36be5e6b..99f0e1d34217 100644 --- a/yarn-project/validator-client/src/duties/validation_service.test.ts +++ b/yarn-project/validator-client/src/duties/validation_service.test.ts @@ -25,20 +25,11 @@ describe('ValidationService', () => { it('creates a proposal with txs appended', async () => { const txs = await Promise.all([Tx.random(), Tx.random()]); const { - blockNumber, payload: { header, archive, stateReference }, } = makeBlockProposal({ txs }); - const proposal = await service.createBlockProposal( - blockNumber, - header, - archive, - stateReference, - txs, - addresses[0], - { - publishFullTxs: true, - }, - ); + const proposal = await service.createBlockProposal(header, archive, stateReference, txs, addresses[0], { + publishFullTxs: true, + }); expect(proposal.getSender()).toEqual(store.getAddress(0)); expect(proposal.txs).toBeDefined(); expect(proposal.txs).toBe(txs); @@ -47,20 +38,11 @@ describe('ValidationService', () => { it('creates a proposal without txs appended', async () => { const txs = await Promise.all([Tx.random(), Tx.random()]); const { - blockNumber, payload: { header, archive, stateReference }, } = makeBlockProposal({ txs }); - const proposal = await service.createBlockProposal( - blockNumber, - header, - archive, - stateReference, - txs, - addresses[0], - { - publishFullTxs: false, - }, - ); + const proposal = await service.createBlockProposal(header, archive, stateReference, txs, addresses[0], { + publishFullTxs: false, + }); 
expect(proposal.getSender()).toEqual(addresses[0]); expect(proposal.txs).toBeUndefined(); }); diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index 7af5e978387e..345895bc6b02 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -21,7 +21,6 @@ export class ValidationService { /** * Create a block proposal with the given header, archive, and transactions * - * @param blockNumber - The block number this proposal is for * @param header - The block header * @param archive - The archive of the current block * @param txs - TxHash[] ordered list of transactions @@ -29,7 +28,6 @@ export class ValidationService { * @returns A block proposal signing the above information (not the current implementation!!!) */ async createBlockProposal( - blockNumber: number, header: ProposedBlockHeader, archive: Fr, stateReference: StateReference, @@ -49,7 +47,6 @@ export class ValidationService { const txHashes = await Promise.all(txs.map(tx => tx.getTxHash())); return BlockProposal.createProposalFromSigner( - blockNumber, new ConsensusPayload(header, archive, stateReference), txHashes, options.publishFullTxs ? 
txs : undefined, @@ -74,7 +71,7 @@ export class ValidationService { const signatures = await Promise.all( attestors.map(attestor => this.keyStore.signMessageWithAddress(attestor, buf)), ); - return signatures.map(sig => new BlockAttestation(proposal.blockNumber, proposal.payload, sig, proposal.signature)); + return signatures.map(sig => new BlockAttestation(proposal.payload, sig, proposal.signature)); } async signAttestationsAndSigners( diff --git a/yarn-project/validator-client/src/factory.ts b/yarn-project/validator-client/src/factory.ts index a1ab5cf6dd44..4d2308a7d0b0 100644 --- a/yarn-project/validator-client/src/factory.ts +++ b/yarn-project/validator-client/src/factory.ts @@ -24,7 +24,9 @@ export function createBlockProposalHandler( }, ) { const metrics = new ValidatorMetrics(deps.telemetry); - const blockProposalValidator = new BlockProposalValidator(deps.epochCache); + const blockProposalValidator = new BlockProposalValidator(deps.epochCache, { + txsPermitted: !config.disableTransactions, + }); return new BlockProposalHandler( deps.blockBuilder, deps.blockSource, diff --git a/yarn-project/validator-client/src/metrics.ts b/yarn-project/validator-client/src/metrics.ts index 5f0b73e80fb9..92171e9b2420 100644 --- a/yarn-project/validator-client/src/metrics.ts +++ b/yarn-project/validator-client/src/metrics.ts @@ -62,9 +62,10 @@ export class ValidatorMetrics { } public recordFailedReexecution(proposal: BlockProposal) { + const proposer = proposal.getSender(); this.failedReexecutionCounter.add(1, { [Attributes.STATUS]: 'failed', - [Attributes.BLOCK_PROPOSER]: proposal.getSender().toString(), + [Attributes.BLOCK_PROPOSER]: proposer?.toString() ?? 
'unknown', }); } diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index cd01f88af1b2..1fb413528108 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -1,3 +1,4 @@ +import { GENESIS_ARCHIVE_ROOT } from '@aztec/constants'; import type { EpochCache } from '@aztec/epoch-cache'; import { Buffer32 } from '@aztec/foundation/buffer'; import { times } from '@aztec/foundation/collection'; @@ -38,7 +39,8 @@ import { type ValidatorClientConfig, validatorClientConfigMappings } from './con import { ValidatorClient } from './validator.js'; describe('ValidatorClient', () => { - let config: ValidatorClientConfig & Pick; + let config: ValidatorClientConfig & + Pick & { disableTransactions: boolean }; let validatorClient: ValidatorClient; let p2pClient: MockProxy; let blockSource: MockProxy; @@ -75,6 +77,7 @@ describe('ValidatorClient', () => { validatorReexecute: false, validatorReexecuteDeadlineMs: 6000, slashBroadcastedInvalidBlockPenalty: 1n, + disableTransactions: false, }; const keyStore: KeyStore = { @@ -220,6 +223,7 @@ describe('ValidatorClient', () => { describe('attestToProposal', () => { let proposal: BlockProposal; + let blockNumber: number; let sender: PeerId; let blockBuildResult: BuildBlockResult; @@ -234,6 +238,7 @@ describe('ValidatorClient', () => { const emptyInHash = await computeInHashFromL1ToL2Messages([]); const contentCommitment = new ContentCommitment(Fr.random(), emptyInHash, Fr.random()); const blockHeader = makeHeader(1, 100, 100, { contentCommitment }); + blockNumber = blockHeader.getBlockNumber(); proposal = makeBlockProposal({ header: blockHeader }); // Set the current time to the start of the slot of the proposal const genesisTime = 1n; @@ -261,9 +266,13 @@ describe('ValidatorClient', () => { }); epochCache.filterInCommittee.mockResolvedValue([EthAddress.fromString(validatorAccounts[0].address)]); - 
blockSource.getBlock.mockResolvedValue({ - archive: new AppendOnlyTreeSnapshot(proposal.payload.header.lastArchiveRoot, proposal.blockNumber), - } as L2Block); + // Return parent block when requested + blockSource.getBlockHeaderByArchive.mockResolvedValue({ + getBlockNumber: () => blockNumber - 1, + getSlot: () => blockHeader.getSlot() - 1n, + } as BlockHeader); + + blockSource.getGenesisValues.mockResolvedValue({ genesisArchiveRoot: new Fr(GENESIS_ARCHIVE_ROOT) }); blockSource.syncImmediate.mockImplementation(() => Promise.resolve()); blockBuildResult = { @@ -277,7 +286,7 @@ describe('ValidatorClient', () => { block: { header: blockHeader.clone(), body: { txEffects: times(proposal.txHashes.length, () => ({})) }, - archive: new AppendOnlyTreeSnapshot(proposal.archive, proposal.blockNumber), + archive: new AppendOnlyTreeSnapshot(proposal.archive, blockNumber), } as L2Block, }; }); @@ -291,11 +300,11 @@ describe('ValidatorClient', () => { it('should wait for previous block to sync', async () => { epochCache.filterInCommittee.mockResolvedValue([EthAddress.fromString(validatorAccounts[0].address)]); - blockSource.getBlock.mockResolvedValueOnce(undefined); - blockSource.getBlock.mockResolvedValueOnce(undefined); - blockSource.getBlock.mockResolvedValueOnce(undefined); + blockSource.getBlockHeaderByArchive.mockResolvedValueOnce(undefined); + blockSource.getBlockHeaderByArchive.mockResolvedValueOnce(undefined); + blockSource.getBlockHeaderByArchive.mockResolvedValueOnce(undefined); const attestations = await validatorClient.attestToProposal(proposal, sender); - expect(blockSource.getBlock).toHaveBeenCalledTimes(4); + expect(blockSource.getBlockHeaderByArchive).toHaveBeenCalledTimes(4); expect(attestations).toBeDefined(); expect(attestations?.length).toBe(1); }); @@ -318,9 +327,10 @@ describe('ValidatorClient', () => { // We should emit WANT_TO_SLASH_EVENT const proposer = proposal.getSender(); + expect(proposer).toBeDefined(); 
expect(emitSpy).toHaveBeenCalledWith(WANT_TO_SLASH_EVENT, [ { - validator: proposer, + validator: proposer!, amount: config.slashBroadcastedInvalidBlockPenalty, offenseType: OffenseType.BROADCASTED_INVALID_BLOCK_PROPOSAL, epochOrSlot: expect.any(BigInt), @@ -343,7 +353,7 @@ describe('ValidatorClient', () => { blockSource.getBlockHeader.mockResolvedValue({} as BlockHeader); const attestations = await validatorClient.attestToProposal(proposal, sender); expect(attestations).toBeUndefined(); - expect(blockSource.getBlockHeader).toHaveBeenCalledWith(proposal.blockNumber); + expect(blockSource.getBlockHeader).toHaveBeenCalledWith(blockNumber); }); it('should not emit WANT_TO_SLASH_EVENT if slashing is disabled', async () => { @@ -364,6 +374,7 @@ describe('ValidatorClient', () => { expect(txProvider.getTxsForBlockProposal).toHaveBeenCalledWith( proposal, + blockNumber, expect.objectContaining({ pinnedPeer: sender }), ); }); @@ -376,6 +387,7 @@ describe('ValidatorClient', () => { expect(txProvider.getTxsForBlockProposal).toHaveBeenCalledWith( proposal, + blockNumber, expect.objectContaining({ pinnedPeer: sender }), ); }); diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index f8445a0731ff..41ec80119d48 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -9,13 +9,7 @@ import { DateProvider } from '@aztec/foundation/timer'; import type { KeystoreManager } from '@aztec/node-keystore'; import type { P2P, PeerId, TxProvider } from '@aztec/p2p'; import { AuthRequest, AuthResponse, BlockProposalValidator, ReqRespSubProtocol } from '@aztec/p2p'; -import { - OffenseType, - type SlasherConfig, - WANT_TO_SLASH_EVENT, - type Watcher, - type WatcherEmitter, -} from '@aztec/slasher'; +import { OffenseType, WANT_TO_SLASH_EVENT, type Watcher, type WatcherEmitter } from '@aztec/slasher'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { 
CommitteeAttestationsAndSigners, L2BlockSource } from '@aztec/stdlib/block'; import type { IFullNodeBlockBuilder, Validator, ValidatorClientFullConfig } from '@aztec/stdlib/interfaces/server'; @@ -29,7 +23,6 @@ import { EventEmitter } from 'events'; import type { TypedDataDefinition } from 'viem'; import { BlockProposalHandler, type BlockProposalValidationFailureReason } from './block_proposal_handler.js'; -import type { ValidatorClientConfig } from './config.js'; import { ValidationService } from './duties/validation_service.js'; import { NodeKeystoreAdapter } from './key_store/node_keystore_adapter.js'; import { ValidatorMetrics } from './metrics.js'; @@ -134,7 +127,7 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) } static new( - config: ValidatorClientConfig & Pick, + config: ValidatorClientFullConfig, blockBuilder: IFullNodeBlockBuilder, epochCache: EpochCache, p2pClient: P2P, @@ -146,7 +139,9 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) telemetry: TelemetryClient = getTelemetryClient(), ) { const metrics = new ValidatorMetrics(telemetry); - const blockProposalValidator = new BlockProposalValidator(epochCache); + const blockProposalValidator = new BlockProposalValidator(epochCache, { + txsPermitted: !config.disableTransactions, + }); const blockProposalHandler = new BlockProposalHandler( blockBuilder, blockSource, @@ -183,8 +178,13 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) } // Proxy method for backwards compatibility with tests - public reExecuteTransactions(proposal: BlockProposal, txs: any[], l1ToL2Messages: Fr[]): Promise { - return this.blockProposalHandler.reexecuteTransactions(proposal, txs, l1ToL2Messages); + public reExecuteTransactions( + proposal: BlockProposal, + blockNumber: number, + txs: any[], + l1ToL2Messages: Fr[], + ): Promise { + return this.blockProposalHandler.reexecuteTransactions(proposal, blockNumber, txs, l1ToL2Messages); 
} public signWithAddress(addr: EthAddress, msg: TypedDataDefinition) { @@ -256,13 +256,19 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) const slotNumber = proposal.slotNumber.toBigInt(); const proposer = proposal.getSender(); + // Reject proposals with invalid signatures + if (!proposer) { + this.log.warn(`Received proposal with invalid signature for slot ${slotNumber}`); + return undefined; + } + // Check that I have any address in current committee before attesting const inCommittee = await this.epochCache.filterInCommittee(slotNumber, this.getValidatorAddresses()); const partOfCommittee = inCommittee.length > 0; const incFailedAttestation = (reason: string) => this.metrics.incFailedAttestations(1, reason, partOfCommittee); const proposalInfo = { ...proposal.toBlockInfo(), proposer: proposer.toString() }; - this.log.info(`Received proposal for block ${proposal.blockNumber} at slot ${slotNumber}`, { + this.log.info(`Received proposal for slot ${slotNumber}`, { ...proposalInfo, txHashes: proposal.txHashes.map(t => t.toString()), }); @@ -304,7 +310,7 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) } // Provided all of the above checks pass, we can attest to the proposal - this.log.info(`Attesting to proposal for block ${proposal.blockNumber} at slot ${slotNumber}`, proposalInfo); + this.log.info(`Attesting to proposal for block at slot ${slotNumber}`, proposalInfo); this.metrics.incAttestations(inCommittee.length); // If the above function does not throw an error, then we can attest to the proposal @@ -314,6 +320,12 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) private slashInvalidBlock(proposal: BlockProposal) { const proposer = proposal.getSender(); + // Skip if signature is invalid (shouldn't happen since we validate earlier) + if (!proposer) { + this.log.warn(`Cannot slash proposal with invalid signature`); + return; + } + // Trim the set if it's too big. 
if (this.proposersOfInvalidBlocks.size > MAX_PROPOSERS_OF_INVALID_BLOCKS) { // remove oldest proposer. `values` is guaranteed to be in insertion order. @@ -347,7 +359,6 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) } const newProposal = await this.validationService.createBlockProposal( - blockNumber, header, archive, stateReference, @@ -402,7 +413,7 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) attestation => { if (!attestation.payload.equals(proposal.payload)) { this.log.warn( - `Received attestation for slot ${slot} with mismatched payload from ${attestation.getSender().toString()}`, + `Received attestation for slot ${slot} with mismatched payload from ${attestation.getSender()?.toString()}`, { attestationPayload: attestation.payload, proposalPayload: proposal.payload }, ); return false; @@ -415,9 +426,14 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) const oldSenders = attestations.map(attestation => attestation.getSender()); for (const collected of collectedAttestations) { const collectedSender = collected.getSender(); + // Skip attestations with invalid signatures + if (!collectedSender) { + this.log.warn(`Skipping attestation with invalid signature for slot ${slot}`); + continue; + } if ( !myAddresses.some(address => address.equals(collectedSender)) && - !oldSenders.some(sender => sender.equals(collectedSender)) + !oldSenders.some(sender => sender?.equals(collectedSender)) ) { this.log.debug(`Received attestation for slot ${slot} from ${collectedSender.toString()}`); }