diff --git a/.gitignore b/.gitignore index 6c624e2e1f..a718c28914 100644 --- a/.gitignore +++ b/.gitignore @@ -109,4 +109,8 @@ dist # vscode .vscode/ -.pdf \ No newline at end of file +# python +__pycache__/ + +# whitepaper +.pdf diff --git a/packages/protocol/.solhintignore b/packages/protocol/.solhintignore index f64817aa3b..b15d9ed051 100644 --- a/packages/protocol/.solhintignore +++ b/packages/protocol/.solhintignore @@ -3,4 +3,6 @@ contracts/aux/tokens/ERC20Upgradeable.sol contracts/test/TestLibRLPReader.sol contracts/test/TestLibRLPWriter.sol contracts/libs/Lib1559Math.sol +contracts/libs/LibAddress.sol +contracts/libs/LibMath.sol **/contracts/thirdparty/**/*.sol \ No newline at end of file diff --git a/packages/protocol/contracts/L1/LibData.sol b/packages/protocol/contracts/L1/LibData.sol index cdfb313401..6c6094b649 100644 --- a/packages/protocol/contracts/L1/LibData.sol +++ b/packages/protocol/contracts/L1/LibData.sol @@ -26,17 +26,22 @@ library LibData { uint64 commitSlot; } + // 3 slots struct ProposedBlock { bytes32 metaHash; + uint256 deposit; + address proposer; + uint64 proposedAt; } + // 3 + n slots struct ForkChoice { bytes32 blockHash; - uint64 proposedAt; uint64 provenAt; address[] provers; } + // This struct takes 9 slots. struct State { // block id => block hash mapping(uint256 => bytes32) l2Hashes; @@ -46,39 +51,61 @@ library LibData { mapping(uint256 => mapping(bytes32 => ForkChoice)) forkChoices; // proposer => commitSlot => hash(commitHash, commitHeight) mapping(address => mapping(uint256 => bytes32)) commits; - mapping(address => bool) provers; // Whitelisted provers - uint64 statusBits; + // Never or rarely changed uint64 genesisHeight; + uint64 genesisTimestamp; + uint64 __reservedA1; + uint64 statusBits; // rarely change + // Changed when a block is proposed or proven/finalized + uint256 feeBase; + // Changed when a block is proposed + uint64 nextBlockId; + uint64 lastProposedAt; // Timestamp when the last block is proposed. + uint64 avgBlockTime; // The block time moving average + uint64 __avgGasLimit; // the block gaslimit moving average, not updated. 
+ // Changed when a block is proven/finalized uint64 latestVerifiedHeight; uint64 latestVerifiedId; - uint64 nextBlockId; + uint64 avgProofTime; // the proof time moving average + uint64 __reservedC1; + // Reserved + uint256[42] __gap; + } + + struct TentativeState { + mapping(address => bool) proposers; // Whitelisted proposers + mapping(address => bool) provers; // Whitelisted provers + bool whitelistProposers; + bool whitelistProvers; + // // Reserved + uint256[46] __gap; } function saveProposedBlock( - LibData.State storage s, + LibData.State storage state, uint256 id, ProposedBlock memory blk ) internal { - s.proposedBlocks[id % LibConstants.TAIKO_MAX_PROPOSED_BLOCKS] = blk; + state.proposedBlocks[id % LibConstants.K_MAX_NUM_BLOCKS] = blk; } function getProposedBlock( - State storage s, + State storage state, uint256 id ) internal view returns (ProposedBlock storage) { - return s.proposedBlocks[id % LibConstants.TAIKO_MAX_PROPOSED_BLOCKS]; + return state.proposedBlocks[id % LibConstants.K_MAX_NUM_BLOCKS]; } function getL2BlockHash( - State storage s, + State storage state, uint256 number ) internal view returns (bytes32) { - require(number <= s.latestVerifiedHeight, "L1:id"); - return s.l2Hashes[number]; + require(number <= state.latestVerifiedHeight, "L1:id"); + return state.l2Hashes[number]; } function getStateVariables( - State storage s + State storage state ) internal view @@ -89,10 +116,10 @@ library LibData { uint64 nextBlockId ) { - genesisHeight = s.genesisHeight; - latestVerifiedHeight = s.latestVerifiedHeight; - latestVerifiedId = s.latestVerifiedId; - nextBlockId = s.nextBlockId; + genesisHeight = state.genesisHeight; + latestVerifiedHeight = state.latestVerifiedHeight; + latestVerifiedId = state.latestVerifiedId; + nextBlockId = state.nextBlockId; } function hashMetadata( diff --git a/packages/protocol/contracts/L1/TaikoL1.sol b/packages/protocol/contracts/L1/TaikoL1.sol index 07c149d785..e776cd083d 100644 --- a/packages/protocol/contracts/L1/TaikoL1.sol +++ b/packages/protocol/contracts/L1/TaikoL1.sol @@ -8,36 +8,40 @@ // ╱╱╰╯╰╯╰┻┻╯╰┻━━╯╰━━━┻╯╰┻━━┻━━╯ pragma solidity ^0.8.9; -import "@openzeppelin/contracts-upgradeable/utils/math/SafeCastUpgradeable.sol"; - -import "../common/ConfigManager.sol"; import "../common/EssentialContract.sol"; import "../common/IHeaderSync.sol"; import "../libs/LibAnchorSignature.sol"; import "./LibData.sol"; import "./v1/V1Events.sol"; -import "./v1/V1Finalizing.sol"; import "./v1/V1Proposing.sol"; import "./v1/V1Proving.sol"; import "./v1/V1Utils.sol"; +import "./v1/V1Verifying.sol"; /** * @author dantaik */ contract TaikoL1 is EssentialContract, IHeaderSync, V1Events { using LibData for LibData.State; - using LibTxDecoder for bytes; - using SafeCastUpgradeable for uint256; LibData.State public state; - uint256[43] private __gap; + LibData.TentativeState public tentative; + uint256[50] private __gap; function init( address _addressManager, - bytes32 _genesisBlockHash + bytes32 _genesisBlockHash, + uint256 _feeBase ) external initializer { EssentialContract._init(_addressManager); - V1Finalizing.init(state, _genesisBlockHash); + V1Verifying.init({ + state: state, + genesisBlockHash: _genesisBlockHash, + feeBase: _feeBase + }); + + tentative.whitelistProposers = false; + tentative.whitelistProvers = true; } /** @@ -74,19 +78,25 @@ contract TaikoL1 is EssentialContract, IHeaderSync, V1Events { * transactions in the L2 block. 
*/ function proposeBlock(bytes[] calldata inputs) external nonReentrant { - V1Proposing.proposeBlock(state, inputs); - V1Finalizing.verifyBlocks( - state, - LibConstants.TAIKO_MAX_VERIFICATIONS_PER_TX, - false - ); + V1Proposing.proposeBlock({ + state: state, + tentative: tentative, + resolver: AddressResolver(this), + inputs: inputs + }); + V1Verifying.verifyBlocks({ + state: state, + resolver: AddressResolver(this), + maxBlocks: LibConstants.K_MAX_VERIFICATIONS_PER_TX, + checkHalt: false + }); } /** * Prove a block is valid with a zero-knowledge proof, a transaction * merkel proof, and a receipt merkel proof. * - * @param blockIndex The index of the block to prove. This is also used + * @param blockId The index of the block to prove. This is also used * to select the right implementation version. * @param inputs A list of data input: * - inputs[0] is an abi-encoded object with various information @@ -98,22 +108,29 @@ contract TaikoL1 is EssentialContract, IHeaderSync, V1Events { */ function proveBlock( - uint256 blockIndex, + uint256 blockId, bytes[] calldata inputs ) external nonReentrant { - V1Proving.proveBlock(state, AddressResolver(this), blockIndex, inputs); - V1Finalizing.verifyBlocks( - state, - LibConstants.TAIKO_MAX_VERIFICATIONS_PER_TX, - false - ); + V1Proving.proveBlock({ + state: state, + tentative: tentative, + resolver: AddressResolver(this), + blockId: blockId, + inputs: inputs + }); + V1Verifying.verifyBlocks({ + state: state, + resolver: AddressResolver(this), + maxBlocks: LibConstants.K_MAX_VERIFICATIONS_PER_TX, + checkHalt: false + }); } /** * Prove a block is invalid with a zero-knowledge proof and a receipt * merkel proof. * - * @param blockIndex The index of the block to prove. This is also used to + * @param blockId The index of the block to prove. This is also used to * select the right implementation version. * @param inputs A list of data input: * - inputs[0] An Evidence object with various information regarding @@ -124,20 +141,22 @@ contract TaikoL1 is EssentialContract, IHeaderSync, V1Events { * be the only transaction in the L2 block. */ function proveBlockInvalid( - uint256 blockIndex, + uint256 blockId, bytes[] calldata inputs ) external nonReentrant { - V1Proving.proveBlockInvalid( - state, - AddressResolver(this), - blockIndex, - inputs - ); - V1Finalizing.verifyBlocks( - state, - LibConstants.TAIKO_MAX_VERIFICATIONS_PER_TX, - false - ); + V1Proving.proveBlockInvalid({ + state: state, + tentative: tentative, + resolver: AddressResolver(this), + blockId: blockId, + inputs: inputs + }); + V1Verifying.verifyBlocks({ + state: state, + resolver: AddressResolver(this), + maxBlocks: LibConstants.K_MAX_VERIFICATIONS_PER_TX, + checkHalt: false + }); } /** @@ -146,10 +165,49 @@ contract TaikoL1 is EssentialContract, IHeaderSync, V1Events { */ function verifyBlocks(uint256 maxBlocks) external nonReentrant { require(maxBlocks > 0, "L1:maxBlocks"); - V1Finalizing.verifyBlocks(state, maxBlocks, true); + V1Verifying.verifyBlocks({ + state: state, + resolver: AddressResolver(this), + maxBlocks: maxBlocks, + checkHalt: true + }); + } + + /** + * Enable or disable proposer and prover whitelisting + * @param whitelistProposers True to enable proposer whitelisting. + * @param whitelistProvers True to enable prover whitelisting. 
+ */ + function enableWhitelisting( + bool whitelistProposers, + bool whitelistProvers + ) public onlyOwner { + V1Utils.enableWhitelisting({ + tentative: tentative, + whitelistProposers: whitelistProposers, + whitelistProvers: whitelistProvers + }); + } + + /** + * Add or remove a proposer from the whitelist. + * + * @param proposer The proposer to be added or removed. + * @param whitelisted True to add; remove otherwise. + */ + function whitelistProposer( + address proposer, + bool whitelisted + ) public onlyOwner { + V1Utils.whitelistProposer({ + tentative: tentative, + proposer: proposer, + whitelisted: whitelisted + }); } - /* Add or remove a prover from the whitelist. + /** + * Add or remove a prover from the whitelist. * * @param prover The prover to be added or removed. * @param whitelisted True to add; remove otherwise. @@ -158,7 +216,11 @@ contract TaikoL1 is EssentialContract, IHeaderSync, V1Events { address prover, bool whitelisted ) public onlyOwner { - V1Proving.whitelistProver(state, prover, whitelisted); + V1Utils.whitelistProver({ + tentative: tentative, + prover: prover, + whitelisted: whitelisted + }); } /** @@ -169,6 +231,18 @@ contract TaikoL1 is EssentialContract, IHeaderSync, V1Events { V1Utils.halt(state, toHalt); } + /** + * Check whether a proposer is whitelisted. + * + * @param proposer The proposer. + * @return True if the proposer is whitelisted, false otherwise. + */ + function isProposerWhitelisted( + address proposer + ) public view returns (bool) { + return V1Utils.isProposerWhitelisted(tentative, proposer); + } + /** * Check whether a prover is whitelisted. * @@ -176,7 +250,23 @@ contract TaikoL1 is EssentialContract, IHeaderSync, V1Events { * @return True if the prover is whitelisted, false otherwise. */ function isProverWhitelisted(address prover) public view returns (bool) { - return V1Proving.isProverWhitelisted(state, prover); + return V1Utils.isProverWhitelisted(tentative, prover); + } + + function getBlockFee() public view returns (uint256) { + (, uint fee, uint deposit) = V1Proposing.getBlockFee(state); + return fee + deposit; + } + + function getProofReward( + uint64 provenAt, + uint64 proposedAt + ) public view returns (uint256 reward) { + (, reward, ) = V1Verifying.getProofReward({ + state: state, + provenAt: provenAt, + proposedAt: proposedAt + }); } /** @@ -249,36 +339,30 @@ contract TaikoL1 is EssentialContract, IHeaderSync, V1Events { pure returns ( uint256, // K_ZKPROOFS_PER_BLOCK - uint256, // TAIKO_CHAIN_ID - uint256, // TAIKO_MAX_PROPOSED_BLOCKS - uint256, // TAIKO_MAX_VERIFICATIONS_PER_TX - uint256, // K_COMMIT_DELAY_CONFIRMATIONS - uint256, // TAIKO_MAX_PROOFS_PER_FORK_CHOICE - uint256, // TAIKO_BLOCK_MAX_GAS_LIMIT - uint256, // TAIKO_BLOCK_MAX_TXS - bytes32, // TAIKO_BLOCK_DEADEND_HASH - uint256, // TAIKO_TXLIST_MAX_BYTES - uint256, // TAIKO_TX_MIN_GAS_LIMIT - uint256, // V1_ANCHOR_TX_GAS_LIMIT - bytes4, // V1_ANCHOR_TX_SELECTOR - bytes32 // V1_INVALIDATE_BLOCK_LOG_TOPIC + uint256, // K_CHAIN_ID + uint256, // K_MAX_NUM_BLOCKS + uint256, // K_MAX_VERIFICATIONS_PER_TX + uint256, // K_COMMIT_DELAY_CONFIRMS + uint256, // K_MAX_PROOFS_PER_FORK_CHOICE + uint256, // K_BLOCK_MAX_GAS_LIMIT + uint256, // K_BLOCK_MAX_TXS + uint256, // K_TXLIST_MAX_BYTES + uint256, // K_TX_MIN_GAS_LIMIT + uint256 // K_ANCHOR_TX_GAS_LIMIT ) { return ( LibConstants.K_ZKPROOFS_PER_BLOCK, - LibConstants.TAIKO_CHAIN_ID, - LibConstants.TAIKO_MAX_PROPOSED_BLOCKS, - LibConstants.TAIKO_MAX_VERIFICATIONS_PER_TX, - LibConstants.K_COMMIT_DELAY_CONFIRMATIONS, - 
LibConstants.TAIKO_MAX_PROOFS_PER_FORK_CHOICE,
- LibConstants.TAIKO_BLOCK_MAX_GAS_LIMIT,
- LibConstants.TAIKO_BLOCK_MAX_TXS,
- LibConstants.TAIKO_BLOCK_DEADEND_HASH,
- LibConstants.TAIKO_TXLIST_MAX_BYTES,
- LibConstants.TAIKO_TX_MIN_GAS_LIMIT,
- LibConstants.V1_ANCHOR_TX_GAS_LIMIT,
- LibConstants.V1_ANCHOR_TX_SELECTOR,
- LibConstants.V1_INVALIDATE_BLOCK_LOG_TOPIC
+ LibConstants.K_CHAIN_ID,
+ LibConstants.K_MAX_NUM_BLOCKS,
+ LibConstants.K_MAX_VERIFICATIONS_PER_TX,
+ LibConstants.K_COMMIT_DELAY_CONFIRMS,
+ LibConstants.K_MAX_PROOFS_PER_FORK_CHOICE,
+ LibConstants.K_BLOCK_MAX_GAS_LIMIT,
+ LibConstants.K_BLOCK_MAX_TXS,
+ LibConstants.K_TXLIST_MAX_BYTES,
+ LibConstants.K_TX_MIN_GAS_LIMIT,
+ LibConstants.K_ANCHOR_TX_GAS_LIMIT
);
}
}
diff --git a/packages/protocol/contracts/L1/TkoToken.sol b/packages/protocol/contracts/L1/TkoToken.sol
index a6a44c7097..58e8979f33 100644
--- a/packages/protocol/contracts/L1/TkoToken.sol
+++ b/packages/protocol/contracts/L1/TkoToken.sol
@@ -16,7 +16,7 @@ import "../libs/LibMath.sol";
import "../thirdparty/ERC20Upgradeable.sol";
/// @author dantaik
-/// @dev This is Taiko's governance token.
+/// @dev This is Taiko's governance and fee token.
contract TkoToken is EssentialContract, ERC20Upgradeable, IMintableERC20 {
using LibMath for uint256;
using SafeCastUpgradeable for uint256;
@@ -42,7 +42,11 @@ contract TkoToken is EssentialContract, ERC20Upgradeable, IMintableERC20 {
/// amountMintToDAO and amountMintToDev shall be set to ~150,000,000.
function init(address _addressManager) external initializer {
EssentialContract._init(_addressManager);
- ERC20Upgradeable.__ERC20_init("Taiko Token", "TKO", 18);
+ ERC20Upgradeable.__ERC20_init({
+ name_: "Taiko Token",
+ symbol_: "TKO",
+ decimals_: 18
+ });
}
/*********************
diff --git a/packages/protocol/contracts/L1/v1/V1Events.sol b/packages/protocol/contracts/L1/v1/V1Events.sol
index 70ecde99b5..e9dd9d3a93 100644
--- a/packages/protocol/contracts/L1/v1/V1Events.sol
+++ b/packages/protocol/contracts/L1/v1/V1Events.sol
@@ -32,6 +32,10 @@ abstract contract V1Events {
address prover
);
+ event WhitelistingEnabled(bool whitelistProposers, bool whitelistProvers);
+
+ event ProposerWhitelisted(address indexed proposer, bool whitelisted);
+
event ProverWhitelisted(address indexed prover, bool whitelisted);
event Halted(bool halted);
diff --git a/packages/protocol/contracts/L1/v1/V1Finalizing.sol b/packages/protocol/contracts/L1/v1/V1Finalizing.sol
deleted file mode 100644
index f5f83c3f65..0000000000
--- a/packages/protocol/contracts/L1/v1/V1Finalizing.sol
+++ /dev/null
@@ -1,87 +0,0 @@
-// SPDX-License-Identifier: MIT
-//
-// ╭━━━━╮╱╱╭╮╱╱╱╱╱╭╮╱╱╱╱╱╭╮
-// ┃╭╮╭╮┃╱╱┃┃╱╱╱╱╱┃┃╱╱╱╱╱┃┃
-// ╰╯┃┃┣┻━┳┫┃╭┳━━╮┃┃╱╱╭━━┫╰━┳━━╮
-// ╱╱┃┃┃╭╮┣┫╰╯┫╭╮┃┃┃╱╭┫╭╮┃╭╮┃━━┫
-// ╱╱┃┃┃╭╮┃┃╭╮┫╰╯┃┃╰━╯┃╭╮┃╰╯┣━━┃
-// ╱╱╰╯╰╯╰┻┻╯╰┻━━╯╰━━━┻╯╰┻━━┻━━╯
-pragma solidity ^0.8.9;
-
-import "./V1Utils.sol";
-
-/// @author dantaik
-library V1Finalizing {
- event BlockVerified(uint256 indexed id, bytes32 blockHash);
-
- event HeaderSynced(
- uint256 indexed height,
- uint256 indexed srcHeight,
- bytes32 srcHash
- );
-
- function init(LibData.State storage s, bytes32 _genesisBlockHash) public {
- s.l2Hashes[0] = _genesisBlockHash;
- s.nextBlockId = 1;
- s.genesisHeight = uint64(block.number);
-
- emit BlockVerified(0, _genesisBlockHash);
- emit HeaderSynced(block.number, 0, _genesisBlockHash);
- }
-
- function verifyBlocks(
- LibData.State storage s,
- uint256 maxBlocks,
- bool checkHalt
- ) public {
- bool halted = V1Utils.isHalted(s);
- if (checkHalt) {
assert(!halted); - } else if (halted) { - // skip finalizing blocks - return; - } - - uint64 latestL2Height = s.latestVerifiedHeight; - bytes32 latestL2Hash = s.l2Hashes[latestL2Height]; - uint64 processed = 0; - - for ( - uint256 i = s.latestVerifiedId + 1; - i < s.nextBlockId && processed <= maxBlocks; - i++ - ) { - LibData.ForkChoice storage fc = s.forkChoices[i][latestL2Hash]; - - // TODO(daniel): use the average proof-time. - if ( - block.timestamp <= - fc.provenAt + LibConstants.K_VERIFICATION_DELAY - ) { - // This block is proven but still needs to wait for verificaiton. - break; - } - - if (fc.blockHash == LibConstants.TAIKO_BLOCK_DEADEND_HASH) { - emit BlockVerified(i, 0); - } else if (fc.blockHash != 0) { - latestL2Height += 1; - latestL2Hash = fc.blockHash; - emit BlockVerified(i, latestL2Hash); - } else { - break; - } - processed += 1; - } - - if (processed > 0) { - s.latestVerifiedId += processed; - - if (latestL2Height > s.latestVerifiedHeight) { - s.latestVerifiedHeight = latestL2Height; - s.l2Hashes[latestL2Height] = latestL2Hash; - emit HeaderSynced(block.number, latestL2Height, latestL2Hash); - } - } - } -} diff --git a/packages/protocol/contracts/L1/v1/V1Proposing.sol b/packages/protocol/contracts/L1/v1/V1Proposing.sol index 8f0af0acea..91544f57ca 100644 --- a/packages/protocol/contracts/L1/v1/V1Proposing.sol +++ b/packages/protocol/contracts/L1/v1/V1Proposing.sol @@ -11,6 +11,7 @@ pragma solidity ^0.8.9; import "../../common/ConfigManager.sol"; import "../../libs/LibConstants.sol"; import "../../libs/LibTxDecoder.sol"; +import "../TkoToken.sol"; import "./V1Utils.sol"; /// @author dantaik @@ -26,104 +27,178 @@ library V1Proposing { ); event BlockProposed(uint256 indexed id, LibData.BlockMetadata meta); + modifier onlyWhitelistedProposer(LibData.TentativeState storage tentative) { + if (tentative.whitelistProposers) { + require(tentative.proposers[msg.sender], "L1:whitelist"); + } + _; + } + function commitBlock( - LibData.State storage s, + LibData.State storage state, uint64 commitSlot, bytes32 commitHash ) public { - assert(LibConstants.K_COMMIT_DELAY_CONFIRMATIONS > 0); + assert(LibConstants.K_COMMIT_DELAY_CONFIRMS > 0); // It's OK to allow committing block when the system is halt. // By not checking the halt status, this method will be cheaper. 
// - // assert(!V1Utils.isHalted(s)); + // assert(!V1Utils.isHalted(state)); bytes32 hash = _aggregateCommitHash(block.number, commitHash); - require(s.commits[msg.sender][commitSlot] != hash, "L1:committed"); - s.commits[msg.sender][commitSlot] = hash; + require(state.commits[msg.sender][commitSlot] != hash, "L1:committed"); + state.commits[msg.sender][commitSlot] = hash; - emit BlockCommitted(commitSlot, uint64(block.number), commitHash); + emit BlockCommitted({ + commitSlot: commitSlot, + commitHeight: uint64(block.number), + commitHash: commitHash + }); } function proposeBlock( - LibData.State storage s, + LibData.State storage state, + LibData.TentativeState storage tentative, + AddressResolver resolver, bytes[] calldata inputs - ) public { - assert(!V1Utils.isHalted(s)); + ) public onlyWhitelistedProposer(tentative) { + assert(!V1Utils.isHalted(state)); require(inputs.length == 2, "L1:inputs:size"); LibData.BlockMetadata memory meta = abi.decode( inputs[0], (LibData.BlockMetadata) ); - bytes calldata txList = inputs[1]; - + _verifyBlockCommit(state, meta); _validateMetadata(meta); - if (LibConstants.K_COMMIT_DELAY_CONFIRMATIONS > 0) { - bytes32 commitHash = _calculateCommitHash( - meta.beneficiary, - meta.txListHash + { + bytes calldata txList = inputs[1]; + // perform validation and populate some fields + require( + txList.length > 0 && + txList.length <= LibConstants.K_TXLIST_MAX_BYTES && + meta.txListHash == txList.hashTxList(), + "L1:txList" ); - require( - isCommitValid( - s, - meta.commitSlot, - meta.commitHeight, - commitHash - ), - "L1:notCommitted" + state.nextBlockId < + state.latestVerifiedId + LibConstants.K_MAX_NUM_BLOCKS, + "L1:tooMany" ); - if (meta.commitSlot == 0) { - // Special handling of slot 0 for refund; non-zero slots - // are supposed to managed by node software for reuse. - delete s.commits[msg.sender][meta.commitSlot]; + meta.id = state.nextBlockId; + meta.l1Height = block.number - 1; + meta.l1Hash = blockhash(block.number - 1); + meta.timestamp = uint64(block.timestamp); + + // if multiple L2 blocks included in the same L1 block, + // their block.mixHash fields for randomness will be the same. + meta.mixHash = bytes32(block.difficulty); + } + + uint256 deposit; + if (LibConstants.K_TOKENOMICS_ENABLED) { + uint256 newFeeBase; + { + uint256 fee; + (newFeeBase, fee, deposit) = getBlockFee(state); + TkoToken(resolver.resolve("tko_token")).burn( + msg.sender, + fee + deposit + ); } + // Update feeBase and avgBlockTime + state.feeBase = V1Utils.movingAverage({ + maValue: state.feeBase, + newValue: newFeeBase, + maf: LibConstants.K_FEE_BASE_MAF + }); + + state.avgBlockTime = V1Utils + .movingAverage({ + maValue: state.avgBlockTime, + newValue: meta.timestamp - state.lastProposedAt, + maf: LibConstants.K_BLOCK_TIME_MAF + }) + .toUint64(); } - require( - txList.length > 0 && - txList.length <= LibConstants.TAIKO_TXLIST_MAX_BYTES && - meta.txListHash == txList.hashTxList(), - "L1:txList" - ); - require( - s.nextBlockId <= - s.latestVerifiedId + LibConstants.TAIKO_MAX_PROPOSED_BLOCKS, - "L1:tooMany" + state.saveProposedBlock( + state.nextBlockId, + LibData.ProposedBlock({ + metaHash: LibData.hashMetadata(meta), + deposit: deposit, + proposer: msg.sender, + proposedAt: meta.timestamp + }) ); - meta.id = s.nextBlockId; - meta.l1Height = block.number - 1; - meta.l1Hash = blockhash(block.number - 1); - meta.timestamp = uint64(block.timestamp); - - // if multiple L2 blocks included in the same L1 block, - // their block.mixHash fields for randomness will be the same. 
- meta.mixHash = bytes32(block.difficulty); - - s.saveProposedBlock( - s.nextBlockId, - LibData.ProposedBlock({metaHash: LibData.hashMetadata(meta)}) - ); + state.lastProposedAt = meta.timestamp; + emit BlockProposed(state.nextBlockId++, meta); + } - emit BlockProposed(s.nextBlockId++, meta); + function getBlockFee( + LibData.State storage state + ) public view returns (uint256 newFeeBase, uint256 fee, uint256 deposit) { + (newFeeBase, ) = V1Utils.getTimeAdjustedFee({ + state: state, + isProposal: true, + tNow: uint64(block.timestamp), + tLast: state.lastProposedAt, + tAvg: state.avgBlockTime, + tCap: LibConstants.K_BLOCK_TIME_CAP + }); + fee = V1Utils.getSlotsAdjustedFee({ + state: state, + isProposal: true, + feeBase: newFeeBase + }); + fee = V1Utils.getBootstrapDiscountedFee(state, fee); + deposit = (fee * LibConstants.K_PROPOSER_DEPOSIT_PCTG) / 100; } function isCommitValid( - LibData.State storage s, + LibData.State storage state, uint256 commitSlot, uint256 commitHeight, bytes32 commitHash ) public view returns (bool) { - assert(LibConstants.K_COMMIT_DELAY_CONFIRMATIONS > 0); + assert(LibConstants.K_COMMIT_DELAY_CONFIRMS > 0); bytes32 hash = _aggregateCommitHash(commitHeight, commitHash); return - s.commits[msg.sender][commitSlot] == hash && - block.number >= - commitHeight + LibConstants.K_COMMIT_DELAY_CONFIRMATIONS; + state.commits[msg.sender][commitSlot] == hash && + block.number >= commitHeight + LibConstants.K_COMMIT_DELAY_CONFIRMS; + } + + function _verifyBlockCommit( + LibData.State storage state, + LibData.BlockMetadata memory meta + ) private { + if (LibConstants.K_COMMIT_DELAY_CONFIRMS == 0) { + return; + } + bytes32 commitHash = _calculateCommitHash( + meta.beneficiary, + meta.txListHash + ); + + require( + isCommitValid({ + state: state, + commitSlot: meta.commitSlot, + commitHeight: meta.commitHeight, + commitHash: commitHash + }), + "L1:notCommitted" + ); + + if (meta.commitSlot == 0) { + // Special handling of slot 0 for refund; non-zero slots + // are supposed to managed by node software for reuse. 
+ delete state.commits[msg.sender][meta.commitSlot]; + } } function _validateMetadata(LibData.BlockMetadata memory meta) private pure { @@ -139,7 +214,7 @@ library V1Proposing { ); require( - meta.gasLimit <= LibConstants.TAIKO_BLOCK_MAX_GAS_LIMIT, + meta.gasLimit <= LibConstants.K_BLOCK_MAX_GAS_LIMIT, "L1:gasLimit" ); require(meta.extraData.length <= 32, "L1:extraData"); diff --git a/packages/protocol/contracts/L1/v1/V1Proving.sol b/packages/protocol/contracts/L1/v1/V1Proving.sol index d023fd7cce..4182c022a5 100644 --- a/packages/protocol/contracts/L1/v1/V1Proving.sol +++ b/packages/protocol/contracts/L1/v1/V1Proving.sol @@ -44,22 +44,21 @@ library V1Proving { address prover ); - event ProverWhitelisted(address indexed prover, bool whitelisted); - - modifier onlyWhitelistedProver(LibData.State storage s) { - if (LibConstants.K_WHITELIST_PROVERS) { - require(s.provers[msg.sender], "L1:whitelist"); + modifier onlyWhitelistedProver(LibData.TentativeState storage tentative) { + if (tentative.whitelistProvers) { + require(tentative.provers[msg.sender], "L1:whitelist"); } _; } function proveBlock( - LibData.State storage s, + LibData.State storage state, + LibData.TentativeState storage tentative, AddressResolver resolver, - uint256 blockIndex, + uint256 blockId, bytes[] calldata inputs - ) public onlyWhitelistedProver(s) { - assert(!V1Utils.isHalted(s)); + ) public onlyWhitelistedProver(tentative) { + assert(!V1Utils.isHalted(state)); // Check and decode inputs require(inputs.length == 3, "L1:inputs:size"); @@ -68,7 +67,7 @@ library V1Proving { bytes calldata anchorReceipt = inputs[2]; // Check evidence - require(evidence.meta.id == blockIndex, "L1:id"); + require(evidence.meta.id == blockId, "L1:id"); require( evidence.proofs.length == 2 + LibConstants.K_ZKPROOFS_PER_BLOCK, "L1:proof:size" @@ -79,11 +78,11 @@ library V1Proving { require(_tx.txType == 0, "L1:anchor:type"); require( _tx.destination == - resolver.resolve(LibConstants.TAIKO_CHAIN_ID, "taiko"), + resolver.resolve(LibConstants.K_CHAIN_ID, "taiko"), "L1:anchor:dest" ); require( - _tx.gasLimit == LibConstants.V1_ANCHOR_TX_GAS_LIMIT, + _tx.gasLimit == LibConstants.K_ANCHOR_TX_GAS_LIMIT, "L1:anchor:gasLimit" ); @@ -95,7 +94,7 @@ library V1Proving { LibBytesUtils.equal( _tx.data, bytes.concat( - LibConstants.V1_ANCHOR_TX_SELECTOR, + LibConstants.K_ANCHOR_TX_SELECTOR, bytes32(evidence.meta.l1Height), evidence.meta.l1Hash ) @@ -105,12 +104,12 @@ library V1Proving { // Check anchor tx is the 1st tx in the block require( - LibMerkleTrie.verifyInclusionProof( - LibRLPWriter.writeUint(0), - anchorTx, - evidence.proofs[LibConstants.K_ZKPROOFS_PER_BLOCK], - evidence.header.transactionsRoot - ), + LibMerkleTrie.verifyInclusionProof({ + _key: LibRLPWriter.writeUint(0), + _value: anchorTx, + _proof: evidence.proofs[LibConstants.K_ZKPROOFS_PER_BLOCK], + _root: evidence.header.transactionsRoot + }), "L1:tx:proof" ); @@ -120,26 +119,33 @@ library V1Proving { require(receipt.status == 1, "L1:receipt:status"); require( - LibMerkleTrie.verifyInclusionProof( - LibRLPWriter.writeUint(0), - anchorReceipt, - evidence.proofs[LibConstants.K_ZKPROOFS_PER_BLOCK + 1], - evidence.header.receiptsRoot - ), + LibMerkleTrie.verifyInclusionProof({ + _key: LibRLPWriter.writeUint(0), + _value: anchorReceipt, + _proof: evidence.proofs[LibConstants.K_ZKPROOFS_PER_BLOCK + 1], + _root: evidence.header.receiptsRoot + }), "L1:receipt:proof" ); // ZK-prove block and mark block proven to be valid. 
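+ // Passing blockHashOverride = 0 stores the block hash computed from the
+ // submitted header; proveBlockInvalid passes K_BLOCK_DEADEND_HASH instead.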
- _proveBlock(s, resolver, evidence, evidence.meta, 0); + _proveBlock({ + state: state, + resolver: resolver, + evidence: evidence, + target: evidence.meta, + blockHashOverride: 0 + }); } function proveBlockInvalid( - LibData.State storage s, + LibData.State storage state, + LibData.TentativeState storage tentative, AddressResolver resolver, - uint256 blockIndex, + uint256 blockId, bytes[] calldata inputs - ) public onlyWhitelistedProver(s) { - assert(!V1Utils.isHalted(s)); + ) public onlyWhitelistedProver(tentative) { + assert(!V1Utils.isHalted(state)); // Check and decode inputs require(inputs.length == 3, "L1:inputs:size"); @@ -151,7 +157,7 @@ library V1Proving { bytes calldata invalidateBlockReceipt = inputs[2]; // Check evidence - require(evidence.meta.id == blockIndex, "L1:id"); + require(evidence.meta.id == blockId, "L1:id"); require( evidence.proofs.length == 1 + LibConstants.K_ZKPROOFS_PER_BLOCK, "L1:proof:size" @@ -167,63 +173,40 @@ library V1Proving { LibReceiptDecoder.Log memory log = receipt.logs[0]; require( log.contractAddress == - resolver.resolve(LibConstants.TAIKO_CHAIN_ID, "taiko"), + resolver.resolve(LibConstants.K_CHAIN_ID, "taiko"), "L1:receipt:addr" ); require(log.data.length == 0, "L1:receipt:data"); require( log.topics.length == 2 && - log.topics[0] == LibConstants.V1_INVALIDATE_BLOCK_LOG_TOPIC && + log.topics[0] == LibConstants.K_INVALIDATE_BLOCK_LOG_TOPIC && log.topics[1] == target.txListHash, "L1:receipt:topics" ); // Check the event is the first one in the throw-away block require( - LibMerkleTrie.verifyInclusionProof( - LibRLPWriter.writeUint(0), - invalidateBlockReceipt, - evidence.proofs[LibConstants.K_ZKPROOFS_PER_BLOCK], - evidence.header.receiptsRoot - ), + LibMerkleTrie.verifyInclusionProof({ + _key: LibRLPWriter.writeUint(0), + _value: invalidateBlockReceipt, + _proof: evidence.proofs[LibConstants.K_ZKPROOFS_PER_BLOCK], + _root: evidence.header.receiptsRoot + }), "L1:receipt:proof" ); // ZK-prove block and mark block proven as invalid. 
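+ // Recording K_BLOCK_DEADEND_HASH as the fork-choice block hash marks the
+ // proposal as invalid; verification later emits BlockVerified for it but
+ // does not advance the verified L2 height.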
- _proveBlock( - s, - resolver, - evidence, - target, - LibConstants.TAIKO_BLOCK_DEADEND_HASH - ); - } - - function whitelistProver( - LibData.State storage s, - address prover, - bool enabled - ) public { - assert(LibConstants.K_WHITELIST_PROVERS); - require( - prover != address(0) && s.provers[prover] != enabled, - "L1:precondition" - ); - - s.provers[prover] = enabled; - emit ProverWhitelisted(prover, enabled); - } - - function isProverWhitelisted( - LibData.State storage s, - address prover - ) public view returns (bool) { - assert(LibConstants.K_WHITELIST_PROVERS); - return s.provers[prover]; + _proveBlock({ + state: state, + resolver: resolver, + evidence: evidence, + target: target, + blockHashOverride: LibConstants.K_BLOCK_DEADEND_HASH + }); } function _proveBlock( - LibData.State storage s, + LibData.State storage state, AddressResolver resolver, Evidence memory evidence, LibData.BlockMetadata memory target, @@ -232,68 +215,63 @@ library V1Proving { require(evidence.meta.id == target.id, "L1:height"); require(evidence.prover != address(0), "L1:prover"); - _checkMetadata(s, target); + _checkMetadata(state, target); _validateHeaderForMetadata(evidence.header, evidence.meta); bytes32 blockHash = evidence.header.hashBlockHeader(); for (uint256 i = 0; i < LibConstants.K_ZKPROOFS_PER_BLOCK; i++) { - LibZKP.verify( - ConfigManager(resolver.resolve("config_manager")).getValue( - string(abi.encodePacked("zk_vkey_", i)) - ), - evidence.proofs[i], - blockHash, - evidence.prover, - evidence.meta.txListHash - ); + LibZKP.verify({ + verificationKey: ConfigManager( + resolver.resolve("config_manager") + ).getValue(string(abi.encodePacked("zk_vkey_", i))), + zkproof: evidence.proofs[i], + blockHash: blockHash, + prover: evidence.prover, + txListHash: evidence.meta.txListHash + }); } - _markBlockProven( - s, - evidence.prover, - target, - evidence.header.parentHash, - blockHashOverride == 0 ? blockHash : blockHashOverride - ); + _markBlockProven({ + state: state, + prover: evidence.prover, + target: target, + parentHash: evidence.header.parentHash, + blockHash: blockHashOverride == 0 ? blockHash : blockHashOverride + }); } function _markBlockProven( - LibData.State storage s, + LibData.State storage state, address prover, LibData.BlockMetadata memory target, bytes32 parentHash, bytes32 blockHash ) private { - LibData.ForkChoice storage fc = s.forkChoices[target.id][parentHash]; + LibData.ForkChoice storage fc = state.forkChoices[target.id][ + parentHash + ]; if (fc.blockHash == 0) { fc.blockHash = blockHash; - fc.proposedAt = target.timestamp; fc.provenAt = uint64(block.timestamp); } else { - require( - fc.proposedAt == target.timestamp, - "L1:proposedAt:conflict" - ); - if (fc.blockHash != blockHash) { // We have a problem here: two proofs are both valid but claims // the new block has different hashes. - V1Utils.halt(s, true); + V1Utils.halt(state, true); return; } require( - fc.provers.length < - LibConstants.TAIKO_MAX_PROOFS_PER_FORK_CHOICE, + fc.provers.length < LibConstants.K_MAX_PROOFS_PER_FORK_CHOICE, "L1:proof:tooMany" ); - // No uncle proof can take more than 1.5x time the first proof did. 
- uint256 delay = fc.provenAt - fc.proposedAt;
- uint256 deadline = fc.provenAt + delay / 2;
- require(block.timestamp <= deadline, "L1:tooLate");
+ require(
+ block.timestamp < V1Utils.uncleProofDeadline(state, fc),
+ "L1:tooLate"
+ );
for (uint256 i = 0; i < fc.provers.length; i++) {
require(fc.provers[i] != prover, "L1:prover:dup");
@@ -302,14 +280,14 @@ fc.provers.push(prover);
- emit BlockProven(
- target.id,
- parentHash,
- blockHash,
- fc.proposedAt,
- fc.provenAt,
- prover
- );
+ emit BlockProven({
+ id: target.id,
+ parentHash: parentHash,
+ blockHash: blockHash,
+ timestamp: target.timestamp,
+ provenAt: fc.provenAt,
+ prover: prover
+ });
}
function _validateAnchorTxSignature(
@@ -330,15 +308,15 @@ }
function _checkMetadata(
- LibData.State storage s,
+ LibData.State storage state,
LibData.BlockMetadata memory meta
) private view {
require(
- meta.id > s.latestVerifiedId && meta.id < s.nextBlockId,
+ meta.id > state.latestVerifiedId && meta.id < state.nextBlockId,
"L1:meta:id"
);
require(
- LibData.getProposedBlock(s, meta.id).metaHash ==
+ LibData.getProposedBlock(state, meta.id).metaHash ==
LibData.hashMetadata(meta),
"L1:metaHash"
);
@@ -353,7 +331,7 @@ header.beneficiary == meta.beneficiary &&
header.difficulty == 0 &&
header.gasLimit ==
- meta.gasLimit + LibConstants.V1_ANCHOR_TX_GAS_LIMIT &&
+ meta.gasLimit + LibConstants.K_ANCHOR_TX_GAS_LIMIT &&
header.gasUsed > 0 &&
header.timestamp == meta.timestamp &&
header.extraData.length == meta.extraData.length &&
diff --git a/packages/protocol/contracts/L1/v1/V1Utils.sol b/packages/protocol/contracts/L1/v1/V1Utils.sol
index caab538f41..168254a4f2 100644
--- a/packages/protocol/contracts/L1/v1/V1Utils.sol
+++ b/packages/protocol/contracts/L1/v1/V1Utils.sol
@@ -15,28 +15,177 @@ import "../LibData.sol";
/// @author dantaik
library V1Utils {
+ using LibMath for uint256;
+
uint64 public constant MASK_HALT = 1 << 0;
+ event WhitelistingEnabled(bool whitelistProposers, bool whitelistProvers);
+ event ProposerWhitelisted(address indexed proposer, bool whitelisted);
+ event ProverWhitelisted(address indexed prover, bool whitelisted);
event Halted(bool halted);
- function halt(LibData.State storage s, bool toHalt) public {
- require(isHalted(s) != toHalt, "L1:precondition");
- setBit(s, MASK_HALT, toHalt);
+ function enableWhitelisting(
+ LibData.TentativeState storage tentative,
+ bool whitelistProposers,
+ bool whitelistProvers
+ ) internal {
+ tentative.whitelistProposers = whitelistProposers;
+ tentative.whitelistProvers = whitelistProvers;
+ emit WhitelistingEnabled(whitelistProposers, whitelistProvers);
+ }
+
+ function whitelistProposer(
+ LibData.TentativeState storage tentative,
+ address proposer,
+ bool whitelisted
+ ) internal {
+ assert(tentative.whitelistProposers);
+ require(
+ proposer != address(0) &&
+ tentative.proposers[proposer] != whitelisted,
+ "L1:precondition"
+ );
+
+ tentative.proposers[proposer] = whitelisted;
+ emit ProposerWhitelisted(proposer, whitelisted);
+ }
+
+ function whitelistProver(
+ LibData.TentativeState storage tentative,
+ address prover,
+ bool whitelisted
+ ) internal {
+ assert(tentative.whitelistProvers);
+ require(
+ prover != address(0) && tentative.provers[prover] != whitelisted,
+ "L1:precondition"
+ );
+
+ tentative.provers[prover] = whitelisted;
+ emit ProverWhitelisted(prover, whitelisted);
+ }
+
+ function halt(LibData.State storage state, bool toHalt) internal {
+ require(isHalted(state) != toHalt, "L1:precondition");
+
setBit(state, MASK_HALT, toHalt); emit Halted(toHalt); } - function isHalted(LibData.State storage s) public view returns (bool) { - return isBitOne(s, MASK_HALT); + function isHalted( + LibData.State storage state + ) internal view returns (bool) { + return isBitOne(state, MASK_HALT); + } + + function isProposerWhitelisted( + LibData.TentativeState storage tentative, + address proposer + ) internal view returns (bool) { + assert(tentative.whitelistProposers); + return tentative.proposers[proposer]; + } + + function isProverWhitelisted( + LibData.TentativeState storage tentative, + address prover + ) internal view returns (bool) { + assert(tentative.whitelistProvers); + return tentative.provers[prover]; + } + + // Implement "Incentive Multipliers", see the whitepaper. + function getTimeAdjustedFee( + LibData.State storage state, + bool isProposal, + uint64 tNow, + uint64 tLast, + uint64 tAvg, + uint64 tCap + ) internal view returns (uint256 newFeeBase, uint256 tRelBp) { + if (tAvg == 0) { + newFeeBase = state.feeBase; + tRelBp = 0; + } else { + uint256 _tAvg = tAvg > tCap ? tCap : tAvg; + uint256 tGrace = (LibConstants.K_FEE_GRACE_PERIOD_PCTG * _tAvg) / + 100; + uint256 tMax = (LibConstants.K_FEE_MAX_PERIOD_PCTG * _tAvg) / 100; + uint256 a = tLast + tGrace; + uint256 b = tNow > a ? tNow - a : 0; + tRelBp = (b.min(tMax) * 10000) / tMax; // [0 - 10000] + uint256 alpha = 10000 + + ((LibConstants.K_REWARD_MULTIPLIER_PCTG - 100) * tRelBp) / + 100; + if (isProposal) { + newFeeBase = (state.feeBase * 10000) / alpha; // fee + } else { + newFeeBase = (state.feeBase * alpha) / 10000; // reward + } + } + } + + // Implement "Slot-availability Multipliers", see the whitepaper. + function getSlotsAdjustedFee( + LibData.State storage state, + bool isProposal, + uint256 feeBase + ) internal view returns (uint256) { + // m is the `n'` in the whitepaper + uint256 m = LibConstants.K_MAX_NUM_BLOCKS - + 1 + + LibConstants.K_FEE_PREMIUM_LAMDA; + // n is the number of unverified blocks + uint256 n = state.nextBlockId - state.latestVerifiedId - 1; + // k is `m − n + 1` or `m − n - 1`in the whitepaper + uint256 k = isProposal ? m - n - 1 : m - n + 1; + return (feeBase * (m - 1) * m) / (m - n) / k; + } + + // Implement "Bootstrap Discount Multipliers", see the whitepaper. + function getBootstrapDiscountedFee( + LibData.State storage state, + uint256 feeBase + ) internal view returns (uint256) { + uint256 halves = uint256(block.timestamp - state.genesisTimestamp) / + LibConstants.K_HALVING; + uint256 gamma = 1024 - (1024 >> halves); + return (feeBase * gamma) / 1024; + } + + // Returns a deterministic deadline for uncle proof submission. + function uncleProofDeadline( + LibData.State storage state, + LibData.ForkChoice storage fc + ) internal view returns (uint64) { + return fc.provenAt + state.avgProofTime; + } + + function movingAverage( + uint256 maValue, + uint256 newValue, + uint256 maf + ) internal pure returns (uint256) { + if (maValue == 0) { + return newValue; + } + uint256 _ma = (maValue * (maf - 1) + newValue) / maf; + return _ma > 0 ? _ma : maValue; } - function setBit(LibData.State storage s, uint64 mask, bool one) private { - s.statusBits = one ? s.statusBits | mask : s.statusBits & ~mask; + function setBit( + LibData.State storage state, + uint64 mask, + bool one + ) private { + state.statusBits = one + ? 
state.statusBits | mask + : state.statusBits & ~mask; } function isBitOne( - LibData.State storage s, + LibData.State storage state, uint64 mask ) private view returns (bool) { - return s.statusBits & mask != 0; + return state.statusBits & mask != 0; } } diff --git a/packages/protocol/contracts/L1/v1/V1Verifying.sol b/packages/protocol/contracts/L1/v1/V1Verifying.sol new file mode 100644 index 0000000000..7c0988ab66 --- /dev/null +++ b/packages/protocol/contracts/L1/v1/V1Verifying.sol @@ -0,0 +1,200 @@ +// SPDX-License-Identifier: MIT +// +// ╭━━━━╮╱╱╭╮╱╱╱╱╱╭╮╱╱╱╱╱╭╮ +// ┃╭╮╭╮┃╱╱┃┃╱╱╱╱╱┃┃╱╱╱╱╱┃┃ +// ╰╯┃┃┣┻━┳┫┃╭┳━━╮┃┃╱╱╭━━┫╰━┳━━╮ +// ╱╱┃┃┃╭╮┣┫╰╯┫╭╮┃┃┃╱╭┫╭╮┃╭╮┃━━┫ +// ╱╱┃┃┃╭╮┃┃╭╮┫╰╯┃┃╰━╯┃╭╮┃╰╯┣━━┃ +// ╱╱╰╯╰╯╰┻┻╯╰┻━━╯╰━━━┻╯╰┻━━┻━━╯ +pragma solidity ^0.8.9; + +import "../../common/AddressResolver.sol"; +import "../TkoToken.sol"; +import "./V1Utils.sol"; + +/// @author dantaik +library V1Verifying { + using SafeCastUpgradeable for uint256; + event BlockVerified(uint256 indexed id, bytes32 blockHash); + + event HeaderSynced( + uint256 indexed height, + uint256 indexed srcHeight, + bytes32 srcHash + ); + + function init( + LibData.State storage state, + bytes32 genesisBlockHash, + uint256 feeBase + ) public { + require(feeBase > 0, "L1:feeBase"); + + state.genesisHeight = uint64(block.number); + state.genesisTimestamp = uint64(block.timestamp); + state.feeBase = feeBase; + state.nextBlockId = 1; + state.lastProposedAt = uint64(block.timestamp); + state.l2Hashes[0] = genesisBlockHash; + + emit BlockVerified(0, genesisBlockHash); + emit HeaderSynced(block.number, 0, genesisBlockHash); + } + + function verifyBlocks( + LibData.State storage state, + AddressResolver resolver, + uint256 maxBlocks, + bool checkHalt + ) public { + bool halted = V1Utils.isHalted(state); + if (checkHalt) { + require(!halted, "L1:halted"); + } else if (halted) { + // skip finalizing blocks + return; + } + + uint64 latestL2Height = state.latestVerifiedHeight; + bytes32 latestL2Hash = state.l2Hashes[latestL2Height]; + uint64 processed = 0; + TkoToken tkoToken; + + for ( + uint256 i = state.latestVerifiedId + 1; + i < state.nextBlockId && processed <= maxBlocks; + i++ + ) { + LibData.ForkChoice storage fc = state.forkChoices[i][latestL2Hash]; + LibData.ProposedBlock storage target = LibData.getProposedBlock( + state, + i + ); + + // Uncle proof can not take more than 2x time the first proof did. 
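+ // A fork choice becomes verifiable only when it has a proven block hash
+ // and its uncle-proof deadline (fc.provenAt + state.avgProofTime, see
+ // V1Utils.uncleProofDeadline) has passed.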
+ if (!_isVerifiable(state, fc)) { + break; + } else { + if (LibConstants.K_TOKENOMICS_ENABLED) { + uint256 newFeeBase; + { + uint256 reward; + uint256 tRelBp; // [0-10000], see the whitepaper + (newFeeBase, reward, tRelBp) = getProofReward({ + state: state, + provenAt: fc.provenAt, + proposedAt: target.proposedAt + }); + + if (address(tkoToken) == address(0)) { + tkoToken = TkoToken(resolver.resolve("tko_token")); + } + + _rewardProvers(fc, reward, tkoToken); + _refundProposerDeposit(target, tRelBp, tkoToken); + } + // Update feeBase and avgProofTime + state.feeBase = V1Utils.movingAverage({ + maValue: state.feeBase, + newValue: newFeeBase, + maf: LibConstants.K_FEE_BASE_MAF + }); + + state.avgProofTime = V1Utils + .movingAverage({ + maValue: state.avgProofTime, + newValue: fc.provenAt - target.proposedAt, + maf: LibConstants.K_PROOF_TIME_MAF + }) + .toUint64(); + } + + if (fc.blockHash != LibConstants.K_BLOCK_DEADEND_HASH) { + latestL2Height += 1; + latestL2Hash = fc.blockHash; + } + processed += 1; + _cleanUp(fc); + emit BlockVerified(i, fc.blockHash); + } + } + + if (processed > 0) { + state.latestVerifiedId += processed; + + if (latestL2Height > state.latestVerifiedHeight) { + state.latestVerifiedHeight = latestL2Height; + state.l2Hashes[latestL2Height] = latestL2Hash; + emit HeaderSynced(block.number, latestL2Height, latestL2Hash); + } + } + } + + function getProofReward( + LibData.State storage state, + uint64 provenAt, + uint64 proposedAt + ) public view returns (uint256 newFeeBase, uint256 reward, uint256 tRelBp) { + (newFeeBase, tRelBp) = V1Utils.getTimeAdjustedFee({ + state: state, + isProposal: false, + tNow: provenAt, + tLast: proposedAt, + tAvg: state.avgProofTime, + tCap: LibConstants.K_PROOF_TIME_CAP + }); + reward = V1Utils.getSlotsAdjustedFee({ + state: state, + isProposal: false, + feeBase: newFeeBase + }); + reward = (reward * (10000 - LibConstants.K_REWARD_BURN_BP)) / 10000; + } + + function _refundProposerDeposit( + LibData.ProposedBlock storage target, + uint256 tRelBp, + TkoToken tkoToken + ) private { + uint refund = (target.deposit * (10000 - tRelBp)) / 10000; + if (refund > 0) { + tkoToken.mint(target.proposer, refund); + } + } + + function _rewardProvers( + LibData.ForkChoice storage fc, + uint256 reward, + TkoToken tkoToken + ) private { + uint sum = 2 ** fc.provers.length - 1; + for (uint i = 0; i < fc.provers.length; i++) { + uint weight = (1 << (fc.provers.length - i - 1)); + uint proverReward = (reward * weight) / sum; + + if (tkoToken.balanceOf(fc.provers[i]) == 0) { + // reduce reward if the prover has 0 TKO balance. 
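+ // Illustration: with three provers the weights are 4, 2 and 1 out of
+ // sum = 2**3 - 1 = 7; a prover holding no TKO has its share halved below.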
+ proverReward /= 2;
+ }
+ tkoToken.mint(fc.provers[i], proverReward);
+ }
+ }
+
+ function _cleanUp(LibData.ForkChoice storage fc) private {
+ fc.blockHash = 0;
+ fc.provenAt = 0;
+ for (uint i = 0; i < fc.provers.length; i++) {
+ fc.provers[i] = address(0);
+ }
+ delete fc.provers;
+ }
+
+ function _isVerifiable(
+ LibData.State storage state,
+ LibData.ForkChoice storage fc
+ ) private view returns (bool) {
+ return
+ fc.blockHash != 0 &&
+ block.timestamp > V1Utils.uncleProofDeadline(state, fc);
+ }
+}
diff --git a/packages/protocol/contracts/L2/V1TaikoL2.sol b/packages/protocol/contracts/L2/V1TaikoL2.sol
index 1674cbea0f..f562b428bd 100644
--- a/packages/protocol/contracts/L2/V1TaikoL2.sol
+++ b/packages/protocol/contracts/L2/V1TaikoL2.sol
@@ -52,12 +52,13 @@ contract V1TaikoL2 is AddressResolver, ReentrancyGuard, IHeaderSync {
for (uint256 i = 0; i < 255 && number >= i + 2; i++) {
ancestors[i] = blockhash(number - i - 2);
}
- publicInputHash = _hashPublicInputs(
- block.chainid,
- number,
- 0,
- ancestors
- );
+
+ publicInputHash = _hashPublicInputs({
+ chainId: block.chainid,
+ number: number,
+ feeBase: 0,
+ ancestors: ancestors
+ });
}
/**********************
@@ -68,7 +69,8 @@ contract V1TaikoL2 is AddressResolver, ReentrancyGuard, IHeaderSync {
* bridging. This function will also check certain block-level global
* variables because they are not part of the Trie structure.
*
- * Note that this transaction shall be the first transaction in every L2 block.
+ * Note that this transaction shall be the first transaction in every
+ * L2 block.
*
* @param l1Height The latest L1 block height when this block was proposed.
* @param l1Hash The latest L1 block hash when this block was proposed.
@@ -94,11 +96,11 @@ contract V1TaikoL2 is AddressResolver, ReentrancyGuard, IHeaderSync {
LibInvalidTxList.Reason hint,
uint256 txIdx
) external {
- LibInvalidTxList.Reason reason = LibInvalidTxList.isTxListInvalid(
- txList,
- hint,
- txIdx
- );
+ LibInvalidTxList.Reason reason = LibInvalidTxList.isTxListInvalid({
+ encoded: txList,
+ hint: hint,
+ txIdx: txIdx
+ });
require(reason != LibInvalidTxList.Reason.OK, "L2:reason");
_checkPublicInputs();
@@ -134,41 +136,38 @@ contract V1TaikoL2 is AddressResolver, ReentrancyGuard, IHeaderSync {
* Private Functions *
**********************/
+ // NOTE: If the order of the return values of this function changes, then
+ // some test cases that use this function in generate_genesis.test.ts
+ // may also need to be modified accordingly.
function getConstants() public pure returns ( uint256, // K_ZKPROOFS_PER_BLOCK - uint256, // TAIKO_CHAIN_ID - uint256, // TAIKO_MAX_PROPOSED_BLOCKS - uint256, // TAIKO_MAX_VERIFICATIONS_PER_TX - uint256, // K_COMMIT_DELAY_CONFIRMATIONS - uint256, // TAIKO_MAX_PROOFS_PER_FORK_CHOICE - uint256, // TAIKO_BLOCK_MAX_GAS_LIMIT - uint256, // TAIKO_BLOCK_MAX_TXS - bytes32, // TAIKO_BLOCK_DEADEND_HASH - uint256, // TAIKO_TXLIST_MAX_BYTES - uint256, // TAIKO_TX_MIN_GAS_LIMIT - uint256, // V1_ANCHOR_TX_GAS_LIMIT - bytes4, // V1_ANCHOR_TX_SELECTOR - bytes32 // V1_INVALIDATE_BLOCK_LOG_TOPIC + uint256, // K_CHAIN_ID + uint256, // K_MAX_NUM_BLOCKS + uint256, // K_MAX_VERIFICATIONS_PER_TX + uint256, // K_COMMIT_DELAY_CONFIRMS + uint256, // K_MAX_PROOFS_PER_FORK_CHOICE + uint256, // K_BLOCK_MAX_GAS_LIMIT + uint256, // K_BLOCK_MAX_TXS + uint256, // K_TXLIST_MAX_BYTES + uint256, // K_TX_MIN_GAS_LIMIT + uint256 // K_ANCHOR_TX_GAS_LIMIT ) { return ( LibConstants.K_ZKPROOFS_PER_BLOCK, - LibConstants.TAIKO_CHAIN_ID, - LibConstants.TAIKO_MAX_PROPOSED_BLOCKS, - LibConstants.TAIKO_MAX_VERIFICATIONS_PER_TX, - LibConstants.K_COMMIT_DELAY_CONFIRMATIONS, - LibConstants.TAIKO_MAX_PROOFS_PER_FORK_CHOICE, - LibConstants.TAIKO_BLOCK_MAX_GAS_LIMIT, - LibConstants.TAIKO_BLOCK_MAX_TXS, - LibConstants.TAIKO_BLOCK_DEADEND_HASH, - LibConstants.TAIKO_TXLIST_MAX_BYTES, - LibConstants.TAIKO_TX_MIN_GAS_LIMIT, - LibConstants.V1_ANCHOR_TX_GAS_LIMIT, - LibConstants.V1_ANCHOR_TX_SELECTOR, - LibConstants.V1_INVALIDATE_BLOCK_LOG_TOPIC + LibConstants.K_CHAIN_ID, + LibConstants.K_MAX_NUM_BLOCKS, + LibConstants.K_MAX_VERIFICATIONS_PER_TX, + LibConstants.K_COMMIT_DELAY_CONFIRMS, + LibConstants.K_MAX_PROOFS_PER_FORK_CHOICE, + LibConstants.K_BLOCK_MAX_GAS_LIMIT, + LibConstants.K_BLOCK_MAX_TXS, + LibConstants.K_TXLIST_MAX_BYTES, + LibConstants.K_TX_MIN_GAS_LIMIT, + LibConstants.K_ANCHOR_TX_GAS_LIMIT ); } @@ -187,12 +186,22 @@ contract V1TaikoL2 is AddressResolver, ReentrancyGuard, IHeaderSync { require( publicInputHash == - _hashPublicInputs(chainId, parentHeight, 0, ancestors), + _hashPublicInputs({ + chainId: chainId, + number: parentHeight, + feeBase: 0, + ancestors: ancestors + }), "L2:publicInputHash" ); ancestors[parentHeight % 255] = parentHash; - publicInputHash = _hashPublicInputs(chainId, number, 0, ancestors); + publicInputHash = _hashPublicInputs({ + chainId: chainId, + number: number, + feeBase: 0, + ancestors: ancestors + }); l2Hashes[parentHeight] = parentHash; } @@ -200,9 +209,9 @@ contract V1TaikoL2 is AddressResolver, ReentrancyGuard, IHeaderSync { function _hashPublicInputs( uint256 chainId, uint256 number, - uint256 baseFee, + uint256 feeBase, bytes32[255] memory ancestors ) private pure returns (bytes32) { - return keccak256(abi.encodePacked(chainId, number, baseFee, ancestors)); + return keccak256(abi.encodePacked(chainId, number, feeBase, ancestors)); } } diff --git a/packages/protocol/contracts/bridge/Bridge.sol b/packages/protocol/contracts/bridge/Bridge.sol index a1108360d8..4bd3976649 100644 --- a/packages/protocol/contracts/bridge/Bridge.sol +++ b/packages/protocol/contracts/bridge/Bridge.sol @@ -59,7 +59,12 @@ contract Bridge is EssentialContract, IBridge { function sendMessage( Message calldata message ) external payable nonReentrant returns (bytes32 signal) { - return LibBridgeSend.sendMessage(state, AddressResolver(this), message); + return + LibBridgeSend.sendMessage({ + state: state, + resolver: AddressResolver(this), + message: message + }); } function sendSignal(bytes32 signal) external override { @@ -72,12 
+77,12 @@ contract Bridge is EssentialContract, IBridge { bytes calldata proof ) external nonReentrant { return - LibBridgeProcess.processMessage( - state, - AddressResolver(this), - message, - proof - ); + LibBridgeProcess.processMessage({ + state: state, + resolver: AddressResolver(this), + message: message, + proof: proof + }); } function retryMessage( @@ -85,19 +90,23 @@ contract Bridge is EssentialContract, IBridge { bool isLastAttempt ) external nonReentrant { return - LibBridgeRetry.retryMessage( - state, - AddressResolver(this), - message, - isLastAttempt - ); + LibBridgeRetry.retryMessage({ + state: state, + resolver: AddressResolver(this), + message: message, + isLastAttempt: isLastAttempt + }); } function enableDestChain( uint256 _chainId, bool enabled ) external nonReentrant { - LibBridgeSend.enableDestChain(state, _chainId, enabled); + LibBridgeSend.enableDestChain({ + state: state, + chainId: _chainId, + enabled: enabled + }); } /********************* @@ -115,13 +124,13 @@ contract Bridge is EssentialContract, IBridge { ) public view virtual override returns (bool) { address srcBridge = resolve(srcChainId, "bridge"); return - LibBridgeSignal.isSignalReceived( - AddressResolver(this), - srcBridge, - srcBridge, - signal, - proof - ); + LibBridgeSignal.isSignalReceived({ + resolver: AddressResolver(this), + srcBridge: srcBridge, + sender: srcBridge, + signal: signal, + proof: proof + }); } function isSignalSent( @@ -139,13 +148,13 @@ contract Bridge is EssentialContract, IBridge { ) public view virtual override returns (bool) { address srcBridge = resolve(srcChainId, "bridge"); return - LibBridgeSignal.isSignalReceived( - AddressResolver(this), - srcBridge, - sender, - signal, - proof - ); + LibBridgeSignal.isSignalReceived({ + resolver: AddressResolver(this), + srcBridge: srcBridge, + sender: sender, + signal: signal, + proof: proof + }); } function getMessageStatus( diff --git a/packages/protocol/contracts/bridge/BridgedERC20.sol b/packages/protocol/contracts/bridge/BridgedERC20.sol index bb1ef4268e..e851894b6c 100644 --- a/packages/protocol/contracts/bridge/BridgedERC20.sol +++ b/packages/protocol/contracts/bridge/BridgedERC20.sol @@ -8,11 +8,13 @@ // ╱╱╰╯╰╯╰┻┻╯╰┻━━╯╰━━━┻╯╰┻━━┻━━╯ pragma solidity ^0.8.9; -import "../common/EssentialContract.sol"; -import "../thirdparty/ERC20Upgradeable.sol"; import "@openzeppelin/contracts-upgradeable/token/ERC20/IERC20Upgradeable.sol"; +// solhint-disable-next-line max-line-length import "@openzeppelin/contracts-upgradeable/token/ERC20/extensions/IERC20MetadataUpgradeable.sol"; +import "../common/EssentialContract.sol"; +import "../thirdparty/ERC20Upgradeable.sol"; + /** * @author dantaik */ @@ -50,7 +52,11 @@ contract BridgedERC20 is "BE:params" ); EssentialContract._init(_addressManager); - ERC20Upgradeable.__ERC20_init(_name, _symbol, _decimals); + ERC20Upgradeable.__ERC20_init({ + name_: _name, + symbol_: _symbol, + decimals_: _decimals + }); srcToken = _srcToken; srcChainId = _srcChainId; } diff --git a/packages/protocol/contracts/bridge/EtherVault.sol b/packages/protocol/contracts/bridge/EtherVault.sol index cc874dbb9f..310b2360b2 100644 --- a/packages/protocol/contracts/bridge/EtherVault.sol +++ b/packages/protocol/contracts/bridge/EtherVault.sol @@ -8,11 +8,13 @@ // ╱╱╰╯╰╯╰┻┻╯╰┻━━╯╰━━━┻╯╰┻━━┻━━╯ pragma solidity ^0.8.9; -import "../common/EssentialContract.sol"; -import "../libs/LibAddress.sol"; +// solhint-disable-next-line max-line-length import "@openzeppelin/contracts-upgradeable/token/ERC20/utils/SafeERC20Upgradeable.sol"; import 
"@openzeppelin/contracts-upgradeable/utils/Create2Upgradeable.sol"; +import "../common/EssentialContract.sol"; +import "../libs/LibAddress.sol"; + /** * Vault that holds Ether. * @author dantaik diff --git a/packages/protocol/contracts/bridge/TokenVault.sol b/packages/protocol/contracts/bridge/TokenVault.sol index 94b4f3e714..ede396b218 100644 --- a/packages/protocol/contracts/bridge/TokenVault.sol +++ b/packages/protocol/contracts/bridge/TokenVault.sol @@ -8,6 +8,7 @@ // ╱╱╰╯╰╯╰┻┻╯╰┻━━╯╰━━━┻╯╰┻━━┻━━╯ pragma solidity ^0.8.9; +// solhint-disable-next-line max-line-length import "@openzeppelin/contracts-upgradeable/token/ERC20/utils/SafeERC20Upgradeable.sol"; import "@openzeppelin/contracts-upgradeable/utils/Create2Upgradeable.sol"; @@ -141,7 +142,12 @@ contract TokenVault is EssentialContract { value: msg.value }(message); - emit EtherSent(to, destChainId, message.depositValue, signal); + emit EtherSent({ + to: to, + destChainId: destChainId, + amount: message.depositValue, + signal: signal + }); } /** @@ -286,13 +292,13 @@ contract TokenVault is EssentialContract { type(BridgedERC20).creationCode ); - BridgedERC20(payable(bridgedToken)).init( - address(_addressManager), - canonicalToken.addr, - canonicalToken.chainId, - canonicalToken.decimals, - canonicalToken.symbol, - string( + BridgedERC20(payable(bridgedToken)).init({ + _addressManager: address(_addressManager), + _srcToken: canonicalToken.addr, + _srcChainId: canonicalToken.chainId, + _decimals: canonicalToken.decimals, + _symbol: canonicalToken.symbol, + _name: string( abi.encodePacked( canonicalToken.name, "(bridged", @@ -301,23 +307,21 @@ contract TokenVault is EssentialContract { ")" ) ) - ); + }); isBridgedToken[bridgedToken] = true; - bridgedToCanonical[bridgedToken] = canonicalToken; - canonicalToBridged[canonicalToken.chainId][ canonicalToken.addr ] = bridgedToken; - emit BridgedERC20Deployed( - canonicalToken.chainId, - canonicalToken.addr, - bridgedToken, - canonicalToken.symbol, - canonicalToken.name, - canonicalToken.decimals - ); + emit BridgedERC20Deployed({ + srcChainId: canonicalToken.chainId, + canonicalToken: canonicalToken.addr, + bridgedToken: bridgedToken, + canonicalTokenSymbol: canonicalToken.symbol, + canonicalTokenName: canonicalToken.name, + canonicalTokenDecimal: canonicalToken.decimals + }); } } diff --git a/packages/protocol/contracts/bridge/libs/LibBridgeProcess.sol b/packages/protocol/contracts/bridge/libs/LibBridgeProcess.sol index ad3f4d5934..9696e841cf 100644 --- a/packages/protocol/contracts/bridge/libs/LibBridgeProcess.sol +++ b/packages/protocol/contracts/bridge/libs/LibBridgeProcess.sol @@ -60,14 +60,15 @@ library LibBridgeProcess { ); // Message must have been "received" on the destChain (current chain) address srcBridge = resolver.resolve(message.srcChainId, "bridge"); + require( - LibBridgeSignal.isSignalReceived( - resolver, - srcBridge, - srcBridge, - signal, - proof - ), + LibBridgeSignal.isSignalReceived({ + resolver: resolver, + srcBridge: srcBridge, + sender: srcBridge, + signal: signal, + proof: proof + }), "B:notReceived" ); @@ -94,12 +95,12 @@ library LibBridgeProcess { uint256 gasLimit = msg.sender == message.owner ? 
gasleft() : message.gasLimit; - bool success = LibBridgeInvoke.invokeMessageCall( - state, - message, - signal, - gasLimit - ); + bool success = LibBridgeInvoke.invokeMessageCall({ + state: state, + message: message, + signal: signal, + gasLimit: gasLimit + }); if (success) { status = LibBridgeData.MessageStatus.DONE; diff --git a/packages/protocol/contracts/bridge/libs/LibBridgeRetry.sol b/packages/protocol/contracts/bridge/libs/LibBridgeRetry.sol index c15aea619f..2a85925c7d 100644 --- a/packages/protocol/contracts/bridge/libs/LibBridgeRetry.sol +++ b/packages/protocol/contracts/bridge/libs/LibBridgeRetry.sol @@ -62,7 +62,12 @@ library LibBridgeRetry { // successful invocation if ( - LibBridgeInvoke.invokeMessageCall(state, message, signal, gasleft()) + LibBridgeInvoke.invokeMessageCall({ + state: state, + message: message, + signal: signal, + gasLimit: gasleft() + }) ) { state.updateMessageStatus(signal, LibBridgeData.MessageStatus.DONE); } else if (isLastAttempt) { diff --git a/packages/protocol/contracts/bridge/libs/LibBridgeSignal.sol b/packages/protocol/contracts/bridge/libs/LibBridgeSignal.sol index 47436c5148..a0fc61c48f 100644 --- a/packages/protocol/contracts/bridge/libs/LibBridgeSignal.sol +++ b/packages/protocol/contracts/bridge/libs/LibBridgeSignal.sol @@ -88,13 +88,13 @@ library LibBridgeSignal { require(srcBridge != address(0), "B:srcBridge"); SignalProof memory mkp = abi.decode(proof, (SignalProof)); - LibTrieProof.verify( - mkp.header.stateRoot, - srcBridge, - _key(sender, signal), - bytes32(uint256(1)), - mkp.proof - ); + LibTrieProof.verify({ + stateRoot: mkp.header.stateRoot, + addr: srcBridge, + key: _key(sender, signal), + value: bytes32(uint256(1)), + mkproof: mkp.proof + }); // get synced header hash of the header height specified in the proof bytes32 syncedHeaderHash = IHeaderSync(resolver.resolve("taiko")) .getSyncedHeader(mkp.header.height); diff --git a/packages/protocol/contracts/common/AddressResolver.sol b/packages/protocol/contracts/common/AddressResolver.sol index 0d4050ac6f..24a6d01e92 100644 --- a/packages/protocol/contracts/common/AddressResolver.sol +++ b/packages/protocol/contracts/common/AddressResolver.sol @@ -8,9 +8,10 @@ // ╱╱╰╯╰╯╰┻┻╯╰┻━━╯╰━━━┻╯╰┻━━┻━━╯ pragma solidity ^0.8.9; -import "./IAddressManager.sol"; import "@openzeppelin/contracts/utils/Strings.sol"; +import "./IAddressManager.sol"; + /** * This abstract contract provides a name-to-address lookup. Under the hood, * it uses an AddressManager to manage the name-to-address mapping. 
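The bridge hunks above mostly convert positional library calls to Solidity's named-argument call syntax. As a minimal, self-contained sketch of that syntax (the `LibExample` library and `NamedArgsDemo` contract below are illustrative only and are not part of this diff):

```solidity
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.9;

// Hypothetical library, used only to demonstrate the call syntax.
library LibExample {
    function sum(uint256 a, uint256 b, uint256 c) internal pure returns (uint256) {
        return a + b + c;
    }
}

contract NamedArgsDemo {
    function positional() external pure returns (uint256) {
        // Arguments bound by position, as in the code being removed.
        return LibExample.sum(1, 2, 3);
    }

    function named() external pure returns (uint256) {
        // The same call with named arguments, as in the replacement code.
        // The order may differ from the declaration; the compiler checks names.
        return LibExample.sum({c: 3, a: 1, b: 2});
    }
}
```

Named arguments make call sites self-documenting and guard against silently swapping adjacent parameters of the same type, e.g. the two `address` arguments (`srcBridge`, `sender`) passed to `isSignalReceived`.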
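The `isSignalReceived` hunk also spells out what the Merkle proof must show: under the synced state root, the source bridge account holds the value `bytes32(uint256(1))` at the storage slot `_key(sender, signal)`. The sketch below illustrates that sender-keyed signal-slot pattern; it assumes the key is a hash of `(sender, signal)`, which may differ from the actual `_key` derivation in LibBridgeSignal.

```solidity
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.9;

// Illustrative only: stores 1 at a slot derived from (sender, signal) so a
// remote chain can later prove the slot's value with a Merkle storage proof.
contract SignalSlotSketch {
    function _key(address sender, bytes32 signal) private pure returns (bytes32) {
        // Assumed derivation for illustration; not necessarily the real one.
        return keccak256(abi.encodePacked(sender, signal));
    }

    function sendSignal(bytes32 signal) external {
        bytes32 slot = _key(msg.sender, signal);
        assembly {
            sstore(slot, 1)
        }
    }

    function isSignalSent(address sender, bytes32 signal) external view returns (bool) {
        bytes32 slot = _key(sender, signal);
        uint256 value;
        assembly {
            value := sload(slot)
        }
        return value == 1;
    }
}
```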
diff --git a/packages/protocol/contracts/common/EssentialContract.sol b/packages/protocol/contracts/common/EssentialContract.sol index 15efe953bd..3c6b086d38 100644 --- a/packages/protocol/contracts/common/EssentialContract.sol +++ b/packages/protocol/contracts/common/EssentialContract.sol @@ -9,6 +9,7 @@ pragma solidity ^0.8.9; import "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol"; +// solhint-disable-next-line max-line-length import "@openzeppelin/contracts-upgradeable/security/ReentrancyGuardUpgradeable.sol"; import "./AddressResolver.sol"; diff --git a/packages/protocol/contracts/libs/LibAnchorSignature.sol b/packages/protocol/contracts/libs/LibAnchorSignature.sol index 7de1e9b93b..fd1e90718e 100644 --- a/packages/protocol/contracts/libs/LibAnchorSignature.sol +++ b/packages/protocol/contracts/libs/LibAnchorSignature.sol @@ -11,9 +11,9 @@ import "./LibUint512Math.sol"; /// @author david library LibAnchorSignature { - address public constant TAIKO_GOLDEN_TOUCH_ADDRESS = + address public constant K_GOLDEN_TOUCH_ADDRESS = 0x0000777735367b36bC9B61C50022d9D0700dB4Ec; - uint256 public constant TAIKO_GOLDEN_TOUCH_PRIVATEKEY = + uint256 public constant K_GOLDEN_TOUCH_PRIVATEKEY = 0x92954368afd3caa1f3ce3ead0069c1af414054aefe1ef9aeacc1bf426222ce38; uint256 public constant GX = @@ -32,7 +32,7 @@ library LibAnchorSignature { // ( // uint256 GX_MUL_GOLDEN_TOUCH_PRIVATEKEY_LOW, // uint256 GX_MUL_GOLDEN_TOUCH_PRIVATEKEY_HIGH - // ) = LibUint512Math.mul(GX, TAIKO_GOLDEN_TOUCH_PRIVATEKEY); + // ) = LibUint512Math.mul(GX, K_GOLDEN_TOUCH_PRIVATEKEY); uint256 public constant GX_MUL_GOLDEN_TOUCH_PRIVATEKEY_LOW = 0xb4a95509ce05fe8d45987859a067780d16a367c0e2cacf79cd301b93fb717940; uint256 public constant GX_MUL_GOLDEN_TOUCH_PRIVATEKEY_HIGH = @@ -41,7 +41,7 @@ library LibAnchorSignature { // ( // uint256 GX2_MUL_GOLDEN_TOUCH_PRIVATEKEY_LOW, // uint256 GX2_MUL_GOLDEN_TOUCH_PRIVATEKEY_HIGH - // ) = LibUint512Math.mul(GX2, TAIKO_GOLDEN_TOUCH_PRIVATEKEY); + // ) = LibUint512Math.mul(GX2, K_GOLDEN_TOUCH_PRIVATEKEY); uint256 public constant GX2_MUL_GOLDEN_TOUCH_PRIVATEKEY_LOW = 0xad77eceea844778cb4376153fc8f06f12f1695df4585bf75bfb17ec19ce90818; uint256 public constant GX2_MUL_GOLDEN_TOUCH_PRIVATEKEY_HIGH = diff --git a/packages/protocol/contracts/libs/LibBlockHeader.sol b/packages/protocol/contracts/libs/LibBlockHeader.sol index 37f7808753..a312d31484 100644 --- a/packages/protocol/contracts/libs/LibBlockHeader.sol +++ b/packages/protocol/contracts/libs/LibBlockHeader.sol @@ -78,7 +78,7 @@ library LibBlockHeader { return header.parentHash != 0 && header.ommersHash == EMPTY_OMMERS_HASH && - header.gasLimit <= LibConstants.TAIKO_BLOCK_MAX_GAS_LIMIT && + header.gasLimit <= LibConstants.K_BLOCK_MAX_GAS_LIMIT && header.extraData.length <= 32 && header.difficulty == 0 && header.nonce == 0; diff --git a/packages/protocol/contracts/libs/LibConstants.sol b/packages/protocol/contracts/libs/LibConstants.sol index b43ff248fb..1b6bf7e2a5 100644 --- a/packages/protocol/contracts/libs/LibConstants.sol +++ b/packages/protocol/contracts/libs/LibConstants.sol @@ -10,29 +10,44 @@ pragma solidity ^0.8.9; /// @author dantaik library LibConstants { - uint256 public constant K_ZKPROOFS_PER_BLOCK = 1; // https://github.com/ethereum-lists/chains/pull/1611 - uint256 public constant TAIKO_CHAIN_ID = 167; + uint256 public constant K_CHAIN_ID = 167; + // up to 2048 pending blocks + uint256 public constant K_MAX_NUM_BLOCKS = 2049; + // This number is calculated from K_MAX_NUM_BLOCKS to make + // the 'the maximum value of 
the multiplier' close to 20.0 + uint256 public constant K_FEE_PREMIUM_LAMDA = 590; + uint256 public constant K_ZKPROOFS_PER_BLOCK = 1; uint256 public constant K_VERIFICATION_DELAY = 60 minutes; - uint256 public constant TAIKO_MAX_PROPOSED_BLOCKS = 2048; - uint256 public constant TAIKO_MAX_VERIFICATIONS_PER_TX = 20; - uint256 public constant K_COMMIT_DELAY_CONFIRMATIONS = 4; - uint256 public constant TAIKO_MAX_PROOFS_PER_FORK_CHOICE = 5; - uint256 public constant TAIKO_BLOCK_MAX_GAS_LIMIT = 5000000; // TODO - uint256 public constant TAIKO_BLOCK_MAX_TXS = 20; // TODO - bytes32 public constant TAIKO_BLOCK_DEADEND_HASH = bytes32(uint256(1)); + uint256 public constant K_MAX_VERIFICATIONS_PER_TX = 20; + uint256 public constant K_COMMIT_DELAY_CONFIRMS = 4; + uint256 public constant K_MAX_PROOFS_PER_FORK_CHOICE = 5; + uint256 public constant K_BLOCK_MAX_GAS_LIMIT = 5000000; // TODO + uint256 public constant K_BLOCK_MAX_TXS = 20; // TODO + uint256 public constant K_TXLIST_MAX_BYTES = 10240; // TODO + uint256 public constant K_TX_MIN_GAS_LIMIT = 21000; // TODO + uint256 public constant K_REWARD_BURN_BP = 100; // 100 basis points or 1% + uint256 public constant K_ANCHOR_TX_GAS_LIMIT = 250000; + uint256 public constant K_PROPOSER_DEPOSIT_PCTG = 25; // 25% - uint256 public constant TAIKO_TXLIST_MAX_BYTES = 10240; // TODO - uint256 public constant TAIKO_TX_MIN_GAS_LIMIT = 21000; // TODO + // Moving average factors + uint256 public constant K_FEE_BASE_MAF = 1024; + uint256 public constant K_BLOCK_TIME_MAF = 1024; + uint256 public constant K_PROOF_TIME_MAF = 1024; - // Taiko L2 releated constants - uint256 public constant V1_ANCHOR_TX_GAS_LIMIT = 250000; + uint64 public constant K_REWARD_MULTIPLIER_PCTG = 400; // 400% + uint64 public constant K_FEE_GRACE_PERIOD_PCTG = 125; // 125% + uint64 public constant K_FEE_MAX_PERIOD_PCTG = 375; // 375% + uint64 public constant K_BLOCK_TIME_CAP = 48 seconds; + uint64 public constant K_PROOF_TIME_CAP = 60 minutes; + uint64 public constant K_HALVING = 180 days; - bytes4 public constant V1_ANCHOR_TX_SELECTOR = + bytes4 public constant K_ANCHOR_TX_SELECTOR = bytes4(keccak256("anchor(uint256,bytes32)")); - bytes32 public constant V1_INVALIDATE_BLOCK_LOG_TOPIC = + bytes32 public constant K_BLOCK_DEADEND_HASH = bytes32(uint256(1)); + bytes32 public constant K_INVALIDATE_BLOCK_LOG_TOPIC = keccak256("BlockInvalidated(bytes32)"); - bool public constant K_WHITELIST_PROVERS = false; + bool public constant K_TOKENOMICS_ENABLED = true; } diff --git a/packages/protocol/contracts/libs/LibInvalidTxList.sol b/packages/protocol/contracts/libs/LibInvalidTxList.sol index 5231a8167f..40d3105129 100644 --- a/packages/protocol/contracts/libs/LibInvalidTxList.sol +++ b/packages/protocol/contracts/libs/LibInvalidTxList.sol @@ -18,23 +18,26 @@ import "../thirdparty/LibRLPWriter.sol"; * A library to invalidate a txList using the following rules: * * A txList is valid if and only if: - * 1. The txList's length is no more than `TAIKO_TXLIST_MAX_BYTES`. + * 1. The txList's length is no more than `K_TXLIST_MAX_BYTES`. * 2. The txList is well-formed RLP, with no additional trailing bytes. - * 3. The total number of transactions is no more than `TAIKO_BLOCK_MAX_TXS`. + * 3. The total number of transactions is no more than `K_BLOCK_MAX_TXS`. * 4. The sum of all transaction gas limit is no more than - * `TAIKO_BLOCK_MAX_GAS_LIMIT`. + * `K_BLOCK_MAX_GAS_LIMIT`. * * A transaction is valid if and only if: * 1. 
The transaction is well-formed RLP, with no additional trailing bytes * (rule #1 in Ethereum yellow paper). * 2. The transaction's signature is valid (rule #2 in Ethereum yellow paper). * 3. The transaction's the gas limit is no smaller than the intrinsic gas - * `TAIKO_TX_MIN_GAS_LIMIT` (rule #5 in Ethereum yellow paper). + * `K_TX_MIN_GAS_LIMIT` (rule #5 in Ethereum yellow paper). * * @title LibInvalidTxList * @author david */ library LibInvalidTxList { + // NOTE: If the order of this enum changes, then some test cases that using + // this enum in generate_genesis.test.ts may also needs to be + // modified accordingly. enum Reason { OK, BINARY_TOO_LARGE, @@ -50,20 +53,20 @@ library LibInvalidTxList { Reason hint, uint256 txIdx ) internal pure returns (Reason) { - if (encoded.length > LibConstants.TAIKO_TXLIST_MAX_BYTES) { + if (encoded.length > LibConstants.K_TXLIST_MAX_BYTES) { return Reason.BINARY_TOO_LARGE; } try LibTxDecoder.decodeTxList(encoded) returns ( LibTxDecoder.TxList memory txList ) { - if (txList.items.length > LibConstants.TAIKO_BLOCK_MAX_TXS) { + if (txList.items.length > LibConstants.K_BLOCK_MAX_TXS) { return Reason.BLOCK_TOO_MANY_TXS; } if ( LibTxDecoder.sumGasLimit(txList) > - LibConstants.TAIKO_BLOCK_MAX_GAS_LIMIT + LibConstants.K_BLOCK_MAX_GAS_LIMIT ) { return Reason.BLOCK_GAS_LIMIT_TOO_LARGE; } @@ -81,7 +84,7 @@ library LibInvalidTxList { if (hint == Reason.TX_GAS_LIMIT_TOO_SMALL) { require( - _tx.gasLimit >= LibConstants.TAIKO_TX_MIN_GAS_LIMIT, + _tx.gasLimit >= LibConstants.K_TX_MIN_GAS_LIMIT, "bad hint" ); return Reason.TX_GAS_LIMIT_TOO_SMALL; diff --git a/packages/protocol/contracts/libs/LibTxDecoder.sol b/packages/protocol/contracts/libs/LibTxDecoder.sol index c7aa4c2895..9c10316f7d 100644 --- a/packages/protocol/contracts/libs/LibTxDecoder.sol +++ b/packages/protocol/contracts/libs/LibTxDecoder.sol @@ -162,10 +162,7 @@ library LibTxDecoder { txLegacy.data = LibRLPReader.readBytes(body[5]); // EIP-155 is enabled on L2 txLegacy.v = uint8( - LibRLPReader.readUint256(body[6]) - - LibConstants.TAIKO_CHAIN_ID * - 2 + - 35 + LibRLPReader.readUint256(body[6]) - LibConstants.K_CHAIN_ID * 2 + 35 ); txLegacy.r = LibRLPReader.readUint256(body[7]); txLegacy.s = LibRLPReader.readUint256(body[8]); diff --git a/packages/protocol/contracts/libs/LibTxUtils.sol b/packages/protocol/contracts/libs/LibTxUtils.sol index 2faa63eb51..d68edfbbdc 100644 --- a/packages/protocol/contracts/libs/LibTxUtils.sol +++ b/packages/protocol/contracts/libs/LibTxUtils.sol @@ -72,9 +72,7 @@ library LibTxUtils { // For legacy transactions, there are three more RLP items to // encode defined in EIP-155. 
if (transaction.txType == 0 && i == list.length - 4) { - list[i + 1] = LibRLPWriter.writeUint( - LibConstants.TAIKO_CHAIN_ID - ); + list[i + 1] = LibRLPWriter.writeUint(LibConstants.K_CHAIN_ID); list[i + 2] = LibRLPWriter.writeUint64(0); list[i + 3] = LibRLPWriter.writeUint64(0); break; diff --git a/packages/protocol/contracts/test/libs/TestLibAnchorSignature.sol b/packages/protocol/contracts/test/libs/TestLibAnchorSignature.sol index 2f1d0bcdc8..3a80e2d2d1 100644 --- a/packages/protocol/contracts/test/libs/TestLibAnchorSignature.sol +++ b/packages/protocol/contracts/test/libs/TestLibAnchorSignature.sol @@ -29,8 +29,8 @@ library TestLibAnchorSignature { function goldenTouchAddress() public pure returns (address, uint256) { return ( - LibAnchorSignature.TAIKO_GOLDEN_TOUCH_ADDRESS, - LibAnchorSignature.TAIKO_GOLDEN_TOUCH_PRIVATEKEY + LibAnchorSignature.K_GOLDEN_TOUCH_ADDRESS, + LibAnchorSignature.K_GOLDEN_TOUCH_PRIVATEKEY ); } } diff --git a/packages/protocol/docs/L1/LibData.md b/packages/protocol/docs/L1/LibData.md index 63f11bb57d..21c79e4035 100644 --- a/packages/protocol/docs/L1/LibData.md +++ b/packages/protocol/docs/L1/LibData.md @@ -22,6 +22,8 @@ struct BlockMetadata { ```solidity struct ProposedBlock { bytes32 metaHash; + address proposer; + uint64 gasLimit; } ``` @@ -47,9 +49,18 @@ struct State { mapping(uint256 => mapping(bytes32 => struct LibData.ForkChoice)) forkChoices; mapping(bytes32 => uint256) commits; uint64 genesisHeight; + uint64 genesisTimestamp; + uint64 reservedA1; + uint64 reservedA2; + uint256 feeBase; + uint64 nextBlockId; + uint64 lastProposedAt; + uint64 avgBlockTime; + uint64 avgGasLimit; uint64 latestVerifiedHeight; uint64 latestVerifiedId; - uint64 nextBlockId; + uint64 avgProofTime; + uint64 reservedC1; } ``` diff --git a/packages/protocol/docs/L1/TaikoL1.md b/packages/protocol/docs/L1/TaikoL1.md index 7e18387f85..2d113249cb 100644 --- a/packages/protocol/docs/L1/TaikoL1.md +++ b/packages/protocol/docs/L1/TaikoL1.md @@ -9,13 +9,13 @@ struct LibData.State state ### \_\_gap ```solidity -uint256[45] __gap +uint256[42] __gap ``` ### init ```solidity -function init(address _addressManager, bytes32 _genesisBlockHash) external +function init(address _addressManager, bytes32 _genesisBlockHash, uint256 _feeBase) external ``` ### commitBlock @@ -51,7 +51,7 @@ Propose a Taiko L2 block. ### proveBlock ```solidity -function proveBlock(uint256 blockIndex, bytes[] inputs) external +function proveBlock(uint256 blockId, bytes[] inputs) external ``` Prove a block is valid with a zero-knowledge proof, a transaction @@ -59,15 +59,15 @@ merkel proof, and a receipt merkel proof. #### Parameters -| Name | Type | Description | -| ---------- | ------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| blockIndex | uint256 | The index of the block to prove. This is also used to select the right implementation version. | -| inputs | bytes[] | A list of data input: - inputs[0] is an abi-encoded object with various information regarding the block to be proven and the actual proofs. - inputs[1] is the actual anchor transaction in this L2 block. Note that the anchor transaction is always the first transaction in the block. - inputs[2] is the receipt of the anchor transaction. 
| +| Name | Type | Description | +| ------- | ------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| blockId | uint256 | The index of the block to prove. This is also used to select the right implementation version. | +| inputs | bytes[] | A list of data input: - inputs[0] is an abi-encoded object with various information regarding the block to be proven and the actual proofs. - inputs[1] is the actual anchor transaction in this L2 block. Note that the anchor transaction is always the first transaction in the block. - inputs[2] is the receipt of the anchor transaction. | ### proveBlockInvalid ```solidity -function proveBlockInvalid(uint256 blockIndex, bytes[] inputs) external +function proveBlockInvalid(uint256 blockId, bytes[] inputs) external ``` Prove a block is invalid with a zero-knowledge proof and a receipt @@ -75,10 +75,10 @@ merkel proof. #### Parameters -| Name | Type | Description | -| ---------- | ------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| blockIndex | uint256 | The index of the block to prove. This is also used to select the right implementation version. | -| inputs | bytes[] | A list of data input: - inputs[0] An Evidence object with various information regarding the block to be proven and the actual proofs. - inputs[1] The target block to be proven invalid. - inputs[2] The receipt for the `invalidBlock` transaction on L2. Note that the `invalidBlock` transaction is supposed to be the only transaction in the L2 block. | +| Name | Type | Description | +| ------- | ------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| blockId | uint256 | The index of the block to prove. This is also used to select the right implementation version. | +| inputs | bytes[] | A list of data input: - inputs[0] An Evidence object with various information regarding the block to be proven and the actual proofs. - inputs[1] The target block to be proven invalid. - inputs[2] The receipt for the `invalidBlock` transaction on L2. Note that the `invalidBlock` transaction is supposed to be the only transaction in the L2 block. | ### verifyBlocks @@ -94,6 +94,18 @@ Verify up to N blocks. | --------- | ------- | ------------------------------- | | maxBlocks | uint256 | Max number of blocks to verify. 
| +### getBlockFee + +```solidity +function getBlockFee() public view returns (uint256 premiumFee) +``` + +### getProofReward + +```solidity +function getProofReward(uint64 provenAt, uint64 proposedAt) public view returns (uint256 premiumReward) +``` + ### isCommitValid ```solidity diff --git a/packages/protocol/docs/L1/v1/V1Finalizing.md b/packages/protocol/docs/L1/v1/V1Finalizing.md index 1dcf04bbe0..23129248ef 100644 --- a/packages/protocol/docs/L1/v1/V1Finalizing.md +++ b/packages/protocol/docs/L1/v1/V1Finalizing.md @@ -1,4 +1,4 @@ -## V1Finalizing +## V1Verifying ### BlockVerified @@ -15,11 +15,11 @@ event HeaderSynced(uint256 height, uint256 srcHeight, bytes32 srcHash) ### init ```solidity -function init(struct LibData.State s, bytes32 _genesisBlockHash) public +function init(struct LibData.State s, bytes32 _genesisBlockHash, uint256 _feeBase) public ``` ### verifyBlocks -```solidity -function verifyBlocks(struct LibData.State s, uint256 maxBlocks) public +``` + ``` diff --git a/packages/protocol/docs/L1/v1/V1Proposing.md b/packages/protocol/docs/L1/v1/V1Proposing.md index 2f2ce24c40..01dce60bd9 100644 --- a/packages/protocol/docs/L1/v1/V1Proposing.md +++ b/packages/protocol/docs/L1/v1/V1Proposing.md @@ -21,7 +21,13 @@ function commitBlock(struct LibData.State s, bytes32 commitHash) public ### proposeBlock ```solidity -function proposeBlock(struct LibData.State s, bytes[] inputs) public +function proposeBlock(struct LibData.State s, contract AddressResolver resolver, bytes[] inputs) public +``` + +### getBlockFee + +```solidity +function getBlockFee(struct LibData.State s) public view returns (uint256 fee, uint256 premiumFee) ``` ### isCommitValid @@ -30,6 +36,12 @@ function proposeBlock(struct LibData.State s, bytes[] inputs) public function isCommitValid(struct LibData.State s, bytes32 hash) public view returns (bool) ``` +### \_calcProposerBootstrapReward + +```solidity +function _calcProposerBootstrapReward(struct LibData.State s) private view returns (uint256 proposerReward) +``` + ### \_validateMetadata ```solidity diff --git a/packages/protocol/docs/L1/v1/V1Proving.md b/packages/protocol/docs/L1/v1/V1Proving.md index 7f56bcc27e..bbdee912ff 100644 --- a/packages/protocol/docs/L1/v1/V1Proving.md +++ b/packages/protocol/docs/L1/v1/V1Proving.md @@ -20,13 +20,13 @@ event BlockProven(uint256 id, bytes32 parentHash, bytes32 blockHash, uint64 time ### proveBlock ```solidity -function proveBlock(struct LibData.State s, contract AddressResolver resolver, uint256 blockIndex, bytes[] inputs) public +function proveBlock(struct LibData.State s, contract AddressResolver resolver, uint256 blockId, bytes[] inputs) public ``` ### proveBlockInvalid ```solidity -function proveBlockInvalid(struct LibData.State s, contract AddressResolver resolver, uint256 blockIndex, bytes[] inputs) public +function proveBlockInvalid(struct LibData.State s, contract AddressResolver resolver, uint256 blockId, bytes[] inputs) public ``` ### \_proveBlock diff --git a/packages/protocol/docs/L1/v1/V1Utils.md b/packages/protocol/docs/L1/v1/V1Utils.md new file mode 100644 index 0000000000..b6aad8d139 --- /dev/null +++ b/packages/protocol/docs/L1/v1/V1Utils.md @@ -0,0 +1,19 @@ +## V1Utils + +### feeScaleBeta + +```solidity +function feeScaleBeta(struct LibData.State s, uint256 fee, bool releaseOneSlot) public view returns (uint256) +``` + +### movingAverage + +```solidity +function movingAverage(uint256 ma, uint256 v, uint256 factor) internal pure returns (uint256) +``` + +### feeScaleAlpha + +```solidity +function 
feeScaleAlpha(uint64 tNow, uint64 tLast, uint64 tAvg) internal pure returns (uint256) +``` diff --git a/packages/protocol/docs/L2/V1TaikoL2.md b/packages/protocol/docs/L2/V1TaikoL2.md index 643da92147..7cd8b1594f 100644 --- a/packages/protocol/docs/L2/V1TaikoL2.md +++ b/packages/protocol/docs/L2/V1TaikoL2.md @@ -109,5 +109,5 @@ function _checkPublicInputs() private ### \_hashPublicInputs ```solidity -function _hashPublicInputs(uint256 chainId, uint256 number, uint256 baseFee, bytes32[255] ancestors) private pure returns (bytes32) +function _hashPublicInputs(uint256 chainId, uint256 number, uint256 feeBase, bytes32[255] ancestors) private pure returns (bytes32) ``` diff --git a/packages/protocol/docs/libs/LibAnchorSignature.md b/packages/protocol/docs/libs/LibAnchorSignature.md index ac0531d134..3d7fe857a4 100644 --- a/packages/protocol/docs/libs/LibAnchorSignature.md +++ b/packages/protocol/docs/libs/LibAnchorSignature.md @@ -1,15 +1,15 @@ ## LibAnchorSignature -### TAIKO_GOLDEN_TOUCH_ADDRESS +### K_GOLDEN_TOUCH_ADDRESS ```solidity -address TAIKO_GOLDEN_TOUCH_ADDRESS +address K_GOLDEN_TOUCH_ADDRESS ``` -### TAIKO_GOLDEN_TOUCH_PRIVATEKEY +### K_GOLDEN_TOUCH_PRIVATEKEY ```solidity -uint256 TAIKO_GOLDEN_TOUCH_PRIVATEKEY +uint256 K_GOLDEN_TOUCH_PRIVATEKEY ``` ### GX diff --git a/packages/protocol/docs/libs/LibConstants.md b/packages/protocol/docs/libs/LibConstants.md index 812205af2d..cb224c99a1 100644 --- a/packages/protocol/docs/libs/LibConstants.md +++ b/packages/protocol/docs/libs/LibConstants.md @@ -1,79 +1,109 @@ ## LibConstants -### TAIKO_CHAIN_ID +### K_CHAIN_ID ```solidity -uint256 TAIKO_CHAIN_ID +uint256 K_CHAIN_ID ``` -### TAIKO_MAX_PROPOSED_BLOCKS +### K_MAX_NUM_BLOCKS ```solidity -uint256 TAIKO_MAX_PROPOSED_BLOCKS +uint256 K_MAX_NUM_BLOCKS ``` -### TAIKO_MAX_VERIFICATIONS_PER_TX +### K_FEE_PREMIUM_PHI ```solidity -uint256 TAIKO_MAX_VERIFICATIONS_PER_TX +uint256 K_FEE_PREMIUM_PHI ``` -### TAIKO_COMMIT_DELAY_CONFIRMATIONS +### K_REWARD_MULTIPLIER_PCTG ```solidity -uint256 TAIKO_COMMIT_DELAY_CONFIRMATIONS +uint64 K_REWARD_MULTIPLIER_PCTG ``` -### TAIKO_MAX_PROOFS_PER_FORK_CHOICE +### K_FEE_GRACE_PERIOD_PCTG ```solidity -uint256 TAIKO_MAX_PROOFS_PER_FORK_CHOICE +uint64 K_FEE_GRACE_PERIOD_PCTG ``` -### TAIKO_BLOCK_MAX_GAS_LIMIT +### K_FEE_MAX_PERIOD_PCTG ```solidity -uint256 TAIKO_BLOCK_MAX_GAS_LIMIT +uint64 K_FEE_MAX_PERIOD_PCTG ``` -### TAIKO_BLOCK_MAX_TXS +### K_MAX_FINALIZATIONS_PER_TX ```solidity -uint256 TAIKO_BLOCK_MAX_TXS +uint256 K_MAX_FINALIZATIONS_PER_TX ``` -### TAIKO_BLOCK_DEADEND_HASH +### K_COMMIT_DELAY_CONFIRMS ```solidity -bytes32 TAIKO_BLOCK_DEADEND_HASH +uint256 K_COMMIT_DELAY_CONFIRMS ``` -### TAIKO_TXLIST_MAX_BYTES +### K_MAX_PROOFS_PER_FORK_CHOICE ```solidity -uint256 TAIKO_TXLIST_MAX_BYTES +uint256 K_MAX_PROOFS_PER_FORK_CHOICE ``` -### TAIKO_TX_MIN_GAS_LIMIT +### K_BLOCK_MAX_GAS_LIMIT ```solidity -uint256 TAIKO_TX_MIN_GAS_LIMIT +uint256 K_BLOCK_MAX_GAS_LIMIT ``` -### V1_ANCHOR_TX_GAS_LIMIT +### K_BLOCK_MAX_TXS ```solidity -uint256 V1_ANCHOR_TX_GAS_LIMIT +uint256 K_BLOCK_MAX_TXS ``` -### V1_ANCHOR_TX_SELECTOR +### K_BLOCK_DEADEND_HASH ```solidity -bytes4 V1_ANCHOR_TX_SELECTOR +bytes32 K_BLOCK_DEADEND_HASH ``` -### V1_INVALIDATE_BLOCK_LOG_TOPIC +### K_TXLIST_MAX_BYTES ```solidity -bytes32 V1_INVALIDATE_BLOCK_LOG_TOPIC +uint256 K_TXLIST_MAX_BYTES +``` + +### K_TX_MIN_GAS_LIMIT + +```solidity +uint256 K_TX_MIN_GAS_LIMIT +``` + +### K_REWARD_BURN_BP + +```solidity +uint256 K_REWARD_BURN_BP +``` + +### K_ANCHOR_TX_GAS_LIMIT + +```solidity +uint256 K_ANCHOR_TX_GAS_LIMIT 
+``` + +### K_ANCHOR_TX_SELECTOR + +```solidity +bytes4 K_ANCHOR_TX_SELECTOR +``` + +### K_INVALIDATE_BLOCK_LOG_TOPIC + +```solidity +bytes32 K_INVALIDATE_BLOCK_LOG_TOPIC ``` diff --git a/packages/protocol/docs/libs/LibInvalidTxList.md b/packages/protocol/docs/libs/LibInvalidTxList.md index 2a2c134cc9..15ff494ca5 100644 --- a/packages/protocol/docs/libs/LibInvalidTxList.md +++ b/packages/protocol/docs/libs/LibInvalidTxList.md @@ -4,11 +4,11 @@ A library to invalidate a txList using the following rules: A txList is valid if and only if: -1. The txList's length is no more than `TAIKO_TXLIST_MAX_BYTES`. +1. The txList's length is no more than `K_TXLIST_MAX_BYTES`. 2. The txList is well-formed RLP, with no additional trailing bytes. -3. The total number of transactions is no more than `TAIKO_BLOCK_MAX_TXS`. +3. The total number of transactions is no more than `K_BLOCK_MAX_TXS`. 4. The sum of all transaction gas limit is no more than - `TAIKO_BLOCK_MAX_GAS_LIMIT`. + `K_BLOCK_MAX_GAS_LIMIT`. A transaction is valid if and only if: @@ -16,7 +16,7 @@ A transaction is valid if and only if: (rule #1 in Ethereum yellow paper). 2. The transaction's signature is valid (rule #2 in Ethereum yellow paper). 3. The transaction's the gas limit is no smaller than the intrinsic gas - `TAIKO_TX_MIN_GAS_LIMIT` (rule #5 in Ethereum yellow paper). + `K_TX_MIN_GAS_LIMIT` (rule #5 in Ethereum yellow paper). ### Reason diff --git a/packages/protocol/hardhat.config.ts b/packages/protocol/hardhat.config.ts index e5b50213f5..d5f0ec648b 100644 --- a/packages/protocol/hardhat.config.ts +++ b/packages/protocol/hardhat.config.ts @@ -92,8 +92,8 @@ const config: HardhatUserConfig = { eachLine: () => ({ transform: (line) => { for (const constantName of [ - "TAIKO_CHAIN_ID", - "K_COMMIT_DELAY_CONFIRMATIONS", + "K_CHAIN_ID", + "K_COMMIT_DELAY_CONFIRMS", "TAIKO_BLOCK_MAX_TXS", "TAIKO_TXLIST_MAX_BYTES", "TAIKO_BLOCK_MAX_GAS_LIMIT", diff --git a/packages/protocol/tasks/config.ts b/packages/protocol/tasks/config.ts index 22ae3d79c9..de7f7afc3b 100644 --- a/packages/protocol/tasks/config.ts +++ b/packages/protocol/tasks/config.ts @@ -1,3 +1,3 @@ // https://github.com/ethereum-lists/chains/pull/1611 -export const TAIKO_CHAINID = 167 -export const DEFAULT_DEPLOY_CONFIRMATIONS = 12 +export const K_CHAIN_ID = 167 +export const K_DEPLOY_CONFIRMATIONS = 12 diff --git a/packages/protocol/tasks/deploy_L1.ts b/packages/protocol/tasks/deploy_L1.ts index 07fcd7a7f4..103b8121ff 100644 --- a/packages/protocol/tasks/deploy_L1.ts +++ b/packages/protocol/tasks/deploy_L1.ts @@ -18,16 +18,11 @@ task("deploy_L1") "L2 genesis block hash", ethers.constants.HashZero ) - .addOptionalParam( - "l2ChainId", - "L2 chain id", - config.TAIKO_CHAINID, - types.int - ) + .addOptionalParam("l2ChainId", "L2 chain id", config.K_CHAIN_ID, types.int) .addOptionalParam( "confirmations", "Number of confirmations to wait for deploy transaction.", - config.DEFAULT_DEPLOY_CONFIRMATIONS, + config.K_DEPLOY_CONFIRMATIONS, types.int ) .setAction(async (args, hre: any) => { @@ -112,9 +107,11 @@ export async function deployContracts(hre: any) { "TaikoL1", await deployBaseLibs(hre) ) + const feeBase = hre.ethers.BigNumber.from(10).pow(18) + await utils.waitTx( hre, - await TaikoL1.init(AddressManager.address, l2GenesisBlockHash) + await TaikoL1.init(AddressManager.address, l2GenesisBlockHash, feeBase) ) // Used by LibBridgeRead @@ -163,26 +160,19 @@ async function deployBaseLibs(hre: any) { ) const libTxDecoder = await utils.deployContract(hre, "LibTxDecoder") - const v1Utils = 
await utils.deployContract(hre, "V1Utils") - const v1Finalizing = await utils.deployContract(hre, "V1Finalizing", { - V1Utils: v1Utils.address, - }) - const v1Proposing = await utils.deployContract(hre, "V1Proposing", { - V1Utils: v1Utils.address, - }) + const v1Verifying = await utils.deployContract(hre, "V1Verifying", {}) + const v1Proposing = await utils.deployContract(hre, "V1Proposing", {}) const v1Proving = await utils.deployContract(hre, "V1Proving", { LibZKP: libZKP.address, LibReceiptDecoder: libReceiptDecoder.address, LibTxDecoder: libTxDecoder.address, - V1Utils: v1Utils.address, }) return { - V1Finalizing: v1Finalizing.address, + V1Verifying: v1Verifying.address, V1Proposing: v1Proposing.address, V1Proving: v1Proving.address, - V1Utils: v1Utils.address, } } diff --git a/packages/protocol/test/L1/TaikoL1.test.ts b/packages/protocol/test/L1/TaikoL1.test.ts index 03ee88752c..fa8a92d942 100644 --- a/packages/protocol/test/L1/TaikoL1.test.ts +++ b/packages/protocol/test/L1/TaikoL1.test.ts @@ -1,5 +1,6 @@ import { expect } from "chai" import { ethers } from "hardhat" +import { BigNumber } from "ethers" describe("TaikoL1", function () { async function deployTaikoL1Fixture() { @@ -21,16 +22,8 @@ describe("TaikoL1", function () { await ethers.getContractFactory("LibZKP") ).deploy() - const v1Utils = await ( - await ethers.getContractFactory("V1Utils") - ).deploy() - const v1Proposing = await ( - await ethers.getContractFactory("V1Proposing", { - libraries: { - V1Utils: v1Utils.address, - }, - }) + await ethers.getContractFactory("V1Proposing") ).deploy() const v1Proving = await ( @@ -39,31 +32,26 @@ describe("TaikoL1", function () { LibReceiptDecoder: libReceiptDecoder.address, LibTxDecoder: libTxDecoder.address, LibZKP: libZKP.address, - V1Utils: v1Utils.address, }, }) ).deploy() - const v1Finalizing = await ( - await ethers.getContractFactory("V1Finalizing", { - libraries: { - V1Utils: v1Utils.address, - }, - }) + const v1Verifying = await ( + await ethers.getContractFactory("V1Verifying") ).deploy() const TaikoL1Factory = await ethers.getContractFactory("TaikoL1", { libraries: { - V1Finalizing: v1Finalizing.address, + V1Verifying: v1Verifying.address, V1Proposing: v1Proposing.address, V1Proving: v1Proving.address, - V1Utils: v1Utils.address, }, }) const genesisHash = randomBytes32() const taikoL1 = await TaikoL1Factory.deploy() - await taikoL1.init(addressManager.address, genesisHash) + const feeBase = BigNumber.from(10).pow(18) + await taikoL1.init(addressManager.address, genesisHash, feeBase) return { taikoL1, genesisHash } } diff --git a/packages/protocol/test/bridge/libs/LibBridgeData.test.ts b/packages/protocol/test/bridge/libs/LibBridgeData.test.ts index 84a633d150..9729cfc3c6 100644 --- a/packages/protocol/test/bridge/libs/LibBridgeData.test.ts +++ b/packages/protocol/test/bridge/libs/LibBridgeData.test.ts @@ -1,6 +1,6 @@ import { expect } from "chai" import { ethers } from "hardhat" -import { TAIKO_BRIDGE_MESSAGE } from "../../constants/messages" +import { K_BRIDGE_MESSAGE } from "../../constants/messages" import { MessageStatus } from "../../../tasks/utils" describe("LibBridgeData", function () { @@ -32,7 +32,7 @@ describe("LibBridgeData", function () { "tuple(uint256 id, address sender, uint256 srcChainId, uint256 destChainId, address owner, address to, address refundAddress, uint256 depositValue, uint256 callValue, uint256 processingFee, uint256 gasLimit, bytes data, string memo)", ] - const testVar = [TAIKO_BRIDGE_MESSAGE, testMessage] + const testVar = [K_BRIDGE_MESSAGE, 
testMessage] return { owner, @@ -51,7 +51,7 @@ describe("LibBridgeData", function () { await deployLibBridgeDataFixture() // dummy struct to test with - const testVar = [TAIKO_BRIDGE_MESSAGE, testMessage] + const testVar = [K_BRIDGE_MESSAGE, testMessage] const hashed = await libData.hashMessage(testMessage) const expectedEncoded = ethers.utils.defaultAbiCoder.encode( testTypes, diff --git a/packages/protocol/test/constants/messages.ts b/packages/protocol/test/constants/messages.ts index 212301c9f8..c6880c6fd0 100644 --- a/packages/protocol/test/constants/messages.ts +++ b/packages/protocol/test/constants/messages.ts @@ -1,3 +1,3 @@ -const TAIKO_BRIDGE_MESSAGE = "TAIKO_BRIDGE_MESSAGE" +const K_BRIDGE_MESSAGE = "TAIKO_BRIDGE_MESSAGE" -export { TAIKO_BRIDGE_MESSAGE } +export { K_BRIDGE_MESSAGE } diff --git a/packages/protocol/test/genesis/generate_genesis.test.ts b/packages/protocol/test/genesis/generate_genesis.test.ts index f92955c6bf..3ffafc6624 100644 --- a/packages/protocol/test/genesis/generate_genesis.test.ts +++ b/packages/protocol/test/genesis/generate_genesis.test.ts @@ -183,7 +183,7 @@ action("Generate Genesis", function () { const tx = await V1TaikoL2.invalidateBlock( bytes, - 5, // hint: TX_INVALID_SIG + 6, // hint: TX_INVALID_SIG 0 ) diff --git a/packages/protocol/test/libs/LibTxUtils.test.ts b/packages/protocol/test/libs/LibTxUtils.test.ts index 4dc9a82a84..ae92f04db9 100644 --- a/packages/protocol/test/libs/LibTxUtils.test.ts +++ b/packages/protocol/test/libs/LibTxUtils.test.ts @@ -30,7 +30,7 @@ describe("LibTxUtils", function () { await ethers.getContractFactory("TestLibRLPWriter") ).deploy() - chainId = (await libConstants.TAIKO_CHAIN_ID()).toNumber() + chainId = (await libConstants.K_CHAIN_ID()).toNumber() const unsignedLegacyTx: UnsignedTransaction = { type: 0,