From baea8c4f0d1eb15e5284724d5829472b314d35fc Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Thu, 14 May 2020 18:33:45 -0400 Subject: [PATCH 01/37] add RollupTxQueue --- .../contracts/RollupTransactionQueue.sol | 27 ++++ .../RollupTransactionQueue.spec.ts | 137 ++++++++++++++++++ 2 files changed, 164 insertions(+) create mode 100644 packages/rollup-contracts/contracts/RollupTransactionQueue.sol create mode 100644 packages/rollup-contracts/test/rollup-list/RollupTransactionQueue.spec.ts diff --git a/packages/rollup-contracts/contracts/RollupTransactionQueue.sol b/packages/rollup-contracts/contracts/RollupTransactionQueue.sol new file mode 100644 index 0000000000000..8289519e7ebf9 --- /dev/null +++ b/packages/rollup-contracts/contracts/RollupTransactionQueue.sol @@ -0,0 +1,27 @@ +pragma solidity ^0.5.0; +pragma experimental ABIEncoderV2; + +/* Internal Imports */ +import {RollupList} from "./RollupList.sol"; + +contract RollupTransactionQueue is RollupList { + address public sequencer; + address public canonicalTransactionChain; + + constructor( + address _rollupMerkleUtilsAddress, + address _sequencer, + address _canonicalTransactionChain + ) RollupList(_rollupMerkleUtilsAddress) public { + sequencer = _sequencer; + canonicalTransactionChain = _canonicalTransactionChain; + } + + function authenticateEnqueue(address _sender) public view returns (bool) { + return _sender == sequencer; + } + function authenticateDequeue(address _sender) public view returns (bool) { + return _sender == canonicalTransactionChain; + } + function authenticateDelete(address _sender) public view returns (bool) { return false; } +} diff --git a/packages/rollup-contracts/test/rollup-list/RollupTransactionQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupTransactionQueue.spec.ts new file mode 100644 index 0000000000000..d82cabd7fd55f --- /dev/null +++ b/packages/rollup-contracts/test/rollup-list/RollupTransactionQueue.spec.ts @@ -0,0 +1,137 @@ +import '../setup' + +/* External Imports */ +import { getLogger } from '@eth-optimism/core-utils' +import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' + +/* Internal Imports */ +import { DefaultRollupBlock } from './RLhelper' + +/* Logging */ +const log = getLogger('rollup-tx-queue', true) + +/* Contract Imports */ +import * as RollupTransactionQueue from '../../build/RollupTransactionQueue.json' +import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' + +/* Begin tests */ +describe('RollupTransactionQueue', () => { + const provider = createMockProvider() + const [wallet, sequencer, canonicalTransactionChain] = getWallets(provider) + let rollupTxQueue + let rollupMerkleUtils + + /* Link libraries before tests */ + before(async () => { + rollupMerkleUtils = await deployContract(wallet, RollupMerkleUtils, [], { + gasLimit: 6700000, + }) + }) + + /* Deploy a new RollupChain before each test */ + beforeEach(async () => { + rollupTxQueue = await deployContract( + wallet, + RollupTransactionQueue, + [ + rollupMerkleUtils.address, + sequencer.address, + canonicalTransactionChain.address, + ], + { + gasLimit: 6700000, + } + ) + }) + + const enqueueAndGenerateBlock = async ( + block: string[], + blockIndex: number, + cumulativePrevElements: number + ): Promise => { + // Submit the rollup block on-chain + const enqueueTx = await rollupTxQueue.connect(sequencer).enqueueBlock(block) + const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) + // Generate a local version of the rollup block + const ethBlockNumber = 
txReceipt.blockNumber + const localBlock = new DefaultRollupBlock( + ethBlockNumber, + blockIndex, + cumulativePrevElements, + block + ) + await localBlock.generateTree() + return localBlock + } + + /* + * Test enqueueBlock() + */ + describe('enqueueBlock() ', async () => { + it('should allow enqueue from sequencer', async () => { + const block = ['0x1234'] + await rollupTxQueue.connect(sequencer).enqueueBlock(block) // Did not throw... success! + }) + it('should not allow enqueue from other address', async () => { + const block = ['0x1234'] + await rollupTxQueue + .enqueueBlock(block) + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Message sender does not have permission to enqueue' + ) + }) + }) + /* + * Test dequeueBlock() + */ + describe('dequeueBlock() ', async () => { + it('should allow dequeue from canonicalTransactionChain', async () => { + const block = ['0x1234'] + const cumulativePrevElements = 0 + const blockIndex = 0 + const localBlock = await enqueueAndGenerateBlock( + block, + blockIndex, + cumulativePrevElements + ) + let blocksLength = await rollupTxQueue.getBlocksLength() + log.debug(`blocksLength before deletion: ${blocksLength}`) + let front = await rollupTxQueue.front() + log.debug(`front before deletion: ${front}`) + let firstBlockHash = await rollupTxQueue.blocks(0) + log.debug(`firstBlockHash before deletion: ${firstBlockHash}`) + + // delete the single appended block + await rollupTxQueue + .connect(canonicalTransactionChain) + .dequeueBeforeInclusive(blockIndex) + + blocksLength = await rollupTxQueue.getBlocksLength() + log.debug(`blocksLength after deletion: ${blocksLength}`) + blocksLength.should.equal(1) + firstBlockHash = await rollupTxQueue.blocks(0) + log.debug(`firstBlockHash after deletion: ${firstBlockHash}`) + firstBlockHash.should.equal( + '0x0000000000000000000000000000000000000000000000000000000000000000' + ) + front = await rollupTxQueue.front() + log.debug(`front after deletion: ${front}`) + front.should.equal(1) + }) + it('should not allow dequeue from other address', async () => { + const block = ['0x1234'] + const cumulativePrevElements = 0 + const blockIndex = 0 + const localBlock = await enqueueAndGenerateBlock( + block, + blockIndex, + cumulativePrevElements + ) + await rollupTxQueue + .dequeueBeforeInclusive(blockIndex) + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Message sender does not have permission to dequeue' + ) + }) + }) +}) From dfcff54c3081bacefbd1e29b77ea33d4774088cd Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Thu, 14 May 2020 18:34:00 -0400 Subject: [PATCH 02/37] remove unused imports --- packages/rollup-contracts/contracts/L1ToL2TransactionQueue.sol | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/rollup-contracts/contracts/L1ToL2TransactionQueue.sol b/packages/rollup-contracts/contracts/L1ToL2TransactionQueue.sol index 027918e04f5eb..f9e8b24dd4b9d 100644 --- a/packages/rollup-contracts/contracts/L1ToL2TransactionQueue.sol +++ b/packages/rollup-contracts/contracts/L1ToL2TransactionQueue.sol @@ -2,8 +2,6 @@ pragma solidity ^0.5.0; pragma experimental ABIEncoderV2; /* Internal Imports */ -import {DataTypes as dt} from "./DataTypes.sol"; -import {RollupMerkleUtils} from "./RollupMerkleUtils.sol"; import {RollupList} from "./RollupList.sol"; contract L1ToL2TransactionQueue is RollupList { From 206b1ae53932da2f58bd57ebea4ac5e7b5e6db6b Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Sun, 17 May 2020 18:48:26 -0400 Subject: [PATCH 03/37] replace blockNumber 
with timestamp for canonicalTxChain --- .../contracts/CanonicalTransactionChain.sol | 66 ++ .../CanonicalTransactionChain.spec.ts | 105 +++ .../L1ToL2TransactionQueue.spec.ts | 240 +++--- .../test/rollup-list/RLhelper.ts | 12 +- .../test/rollup-list/RollupList.spec.ts | 716 +++++++++--------- .../RollupTransactionQueue.spec.ts | 228 +++--- 6 files changed, 769 insertions(+), 598 deletions(-) create mode 100644 packages/rollup-contracts/contracts/CanonicalTransactionChain.sol create mode 100644 packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol new file mode 100644 index 0000000000000..805e53a16c7bb --- /dev/null +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -0,0 +1,66 @@ +pragma solidity ^0.5.0; +pragma experimental ABIEncoderV2; + +/* Internal Imports */ +import {RollupList} from "./RollupList.sol"; + +contract CanonicalTransactionChain is RollupList { + address public sequencer; + address public canonicalTransactionChain; + + constructor( + address _rollupMerkleUtilsAddress, + address _sequencer, + address _canonicalTransactionChain + ) RollupList(_rollupMerkleUtilsAddress) public { + sequencer = _sequencer; + canonicalTransactionChain = _canonicalTransactionChain; + } + + function authenticateEnqueue(address _sender) public view returns (bool) { + return _sender == sequencer; + } + function authenticateDequeue(address _sender) public view returns (bool) { return false; } + function authenticateDelete(address _sender) public view returns (bool) { return false; } + + // appends to the current list of blocks + function appendTransactionBatch(bytes[] memory _txBatch, uint _timestamp) public { + //Check that msg.sender is authorized to append + require(authenticateEnqueue(msg.sender), "Message sender does not have permission to enqueue"); + require(_txBatch.length > 0, "Cannot submit an empty block"); + + // require(_timestamp > lastOVMTimestamp, "timestamps must monotonically increase"); + // lastOVMTimestamp = _timestamp; + // require dist(_timestamp, block.timestamp) < sequencerLivenessAssumption + // require(L1ToL2Queue.ageOfOldestQueuedBlock() < sequencerLivenessAssumption, "must process all L1->L2 blocks older than liveness assumption before processing L2 blocks.") + + + // calculate block header + bytes32 blockHeaderHash = keccak256(abi.encodePacked( + _timestamp, //timestamp, duh + _timestamp, + merkleUtils.getMerkleRoot(_txBatch), // elementsMerkleRoot + _txBatch.length, // numElementsInBlock + cumulativeNumElements // cumulativeNumElements + )); + // store block header + blocks.push(blockHeaderHash); + // update cumulative elements + cumulativeNumElements += _txBatch.length; + + + + // // calculate block header + // bytes32 blockHeaderHash = keccak256(abi.encodePacked( + // _timestamp, //timestamp, duh + // false, //isL1ToL2Tx + // merkleUtils.getMerkleRoot(_txBatch), // elementsMerkleRoot + // _txBatch.length, // numElementsInBlock + // cumulativeNumElements // cumulativePrevElements + // )); + // // store block header + // blocks.push(blockHeaderHash); + // // update cumulative elements + // cumulativeNumElements += _txBatch.length; + } +} diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts new file mode 100644 index 0000000000000..fc13c29b72165 --- /dev/null +++ 
b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -0,0 +1,105 @@ +import '../setup' + +/* External Imports */ +import { getLogger } from '@eth-optimism/core-utils' +import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' + +/* Internal Imports */ +import { DefaultRollupBlock } from './RLhelper' + +/* Logging */ +const log = getLogger('rollup-tx-queue', true) + +/* Contract Imports */ +import * as CanonicalTransactionChain from '../../build/CanonicalTransactionChain.json' +import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' + +/* Begin tests */ +describe.only('CanonicalTransactionChain', () => { + const provider = createMockProvider() + const [wallet, sequencer, canonicalTransactionChain] = getWallets(provider) + let canonicalTxChain + let rollupMerkleUtils + + /* Link libraries before tests */ + before(async () => { + rollupMerkleUtils = await deployContract(wallet, RollupMerkleUtils, [], { + gasLimit: 6700000, + }) + }) + + /* Deploy a new RollupChain before each test */ + beforeEach(async () => { + canonicalTxChain = await deployContract( + wallet, + CanonicalTransactionChain, + [ + rollupMerkleUtils.address, + sequencer.address, + canonicalTransactionChain.address, + ], + { + gasLimit: 6700000, + } + ) + }) + + const enqueueAndGenerateBlock = async ( + block: string[], + timestamp: number, + blockIndex: number, + cumulativePrevElements: number + ): Promise => { + // Submit the rollup block on-chain + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(block, timestamp) + // Generate a local version of the rollup block + const localBlock = new DefaultRollupBlock( + timestamp, + blockIndex, + cumulativePrevElements, + block + ) + await localBlock.generateTree() + return localBlock + } + + /* + * Test enqueueBlock() + */ + describe('appendTransactionBatch() ', async () => { + it('should allow appendTransactionBatch from sequencer', async () => { + const block = ['0x1234', '0x6578'] + const timestamp = 0 + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(block, timestamp) // Did not throw... success! 
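      // A minimal follow-up sketch, not part of the patch above: assuming getBlocksLength()
      // and the public cumulativeNumElements getter are inherited from RollupList here (they
      // are exercised the same way in RollupList.spec.ts), the successful append could also
      // be asserted against stored state rather than only "did not throw":
      //   const blocksLength = await canonicalTxChain.getBlocksLength()
      //   blocksLength.toNumber().should.equal(1)
      //   const numElements = await canonicalTxChain.cumulativeNumElements.call()
      //   numElements.toNumber().should.equal(block.length)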
+ }) + it('should not allow appendTransactionBatch from other address', async () => { + const block = ['0x1234', '0x6578'] + const timestamp = 0 + await canonicalTxChain + .appendTransactionBatch(block, timestamp) + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Message sender does not have permission to enqueue' + ) + }) + it.only('should calculate blockHeaderHash correctly', async () => { + const block = ['0x1234', '0x5678'] + const blockIndex = 0 + const cumulativePrevElements = 0 + const timestamp = 0 + const localBlock = await enqueueAndGenerateBlock( + block, + timestamp, + blockIndex, + cumulativePrevElements + ) + //Check blockHeaderHash + const expectedBlockHeaderHash = await localBlock.hashBlockHeader() + const calculatedBlockHeaderHash = await canonicalTxChain.blocks(0) + calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) + }) + }) +}) diff --git a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts index 79e04ac2a1965..dfb8bbef0c9e2 100644 --- a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts @@ -15,129 +15,129 @@ import * as L1ToL2TransactionQueue from '../../build/L1ToL2TransactionQueue.json import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' /* Begin tests */ -describe('L1ToL2TransactionQueue', () => { - const provider = createMockProvider() - const [ - wallet, - l1ToL2TransactionPasser, - canonicalTransactionChain, - ] = getWallets(provider) - let l1ToL2TxQueue - let rollupMerkleUtils +// describe('L1ToL2TransactionQueue', () => { +// const provider = createMockProvider() +// const [ +// wallet, +// l1ToL2TransactionPasser, +// canonicalTransactionChain, +// ] = getWallets(provider) +// let l1ToL2TxQueue +// let rollupMerkleUtils - /* Link libraries before tests */ - before(async () => { - rollupMerkleUtils = await deployContract(wallet, RollupMerkleUtils, [], { - gasLimit: 6700000, - }) - }) +// /* Link libraries before tests */ +// before(async () => { +// rollupMerkleUtils = await deployContract(wallet, RollupMerkleUtils, [], { +// gasLimit: 6700000, +// }) +// }) - /* Deploy a new RollupChain before each test */ - beforeEach(async () => { - l1ToL2TxQueue = await deployContract( - wallet, - L1ToL2TransactionQueue, - [ - rollupMerkleUtils.address, - l1ToL2TransactionPasser.address, - canonicalTransactionChain.address, - ], - { - gasLimit: 6700000, - } - ) - }) +// /* Deploy a new RollupChain before each test */ +// beforeEach(async () => { +// l1ToL2TxQueue = await deployContract( +// wallet, +// L1ToL2TransactionQueue, +// [ +// rollupMerkleUtils.address, +// l1ToL2TransactionPasser.address, +// canonicalTransactionChain.address, +// ], +// { +// gasLimit: 6700000, +// } +// ) +// }) - const enqueueAndGenerateBlock = async ( - block: string[], - blockIndex: number, - cumulativePrevElements: number - ): Promise => { - // Submit the rollup block on-chain - const enqueueTx = await l1ToL2TxQueue - .connect(l1ToL2TransactionPasser) - .enqueueBlock(block) - const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) - // Generate a local version of the rollup block - const ethBlockNumber = txReceipt.blockNumber - const localBlock = new DefaultRollupBlock( - ethBlockNumber, - blockIndex, - cumulativePrevElements, - block - ) - await localBlock.generateTree() - return localBlock - } +// const 
enqueueAndGenerateBlock = async ( +// block: string[], +// blockIndex: number, +// cumulativePrevElements: number +// ): Promise => { +// // Submit the rollup block on-chain +// const enqueueTx = await l1ToL2TxQueue +// .connect(l1ToL2TransactionPasser) +// .enqueueBlock(block) +// const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) +// // Generate a local version of the rollup block +// const ethBlockNumber = txReceipt.blockNumber +// const localBlock = new DefaultRollupBlock( +// ethBlockNumber, +// blockIndex, +// cumulativePrevElements, +// block +// ) +// await localBlock.generateTree() +// return localBlock +// } - /* - * Test enqueueBlock() - */ - describe('enqueueBlock() ', async () => { - it('should allow enqueue from l1ToL2TransactionPasser', async () => { - const block = ['0x1234'] - await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBlock(block) // Did not throw... success! - }) - it('should not allow enqueue from other address', async () => { - const block = ['0x1234'] - await l1ToL2TxQueue - .enqueueBlock(block) - .should.be.revertedWith( - 'VM Exception while processing transaction: revert Message sender does not have permission to enqueue' - ) - }) - }) - /* - * Test dequeueBlock() - */ - describe('dequeueBlock() ', async () => { - it('should allow dequeue from canonicalTransactionChain', async () => { - const block = ['0x1234'] - const cumulativePrevElements = 0 - const blockIndex = 0 - const localBlock = await enqueueAndGenerateBlock( - block, - blockIndex, - cumulativePrevElements - ) - let blocksLength = await l1ToL2TxQueue.getBlocksLength() - log.debug(`blocksLength before deletion: ${blocksLength}`) - let front = await l1ToL2TxQueue.front() - log.debug(`front before deletion: ${front}`) - let firstBlockHash = await l1ToL2TxQueue.blocks(0) - log.debug(`firstBlockHash before deletion: ${firstBlockHash}`) +// /* +// * Test enqueueBlock() +// */ +// describe('enqueueBlock() ', async () => { +// it('should allow enqueue from l1ToL2TransactionPasser', async () => { +// const block = ['0x1234'] +// await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBlock(block) // Did not throw... success! 
+// }) +// it('should not allow enqueue from other address', async () => { +// const block = ['0x1234'] +// await l1ToL2TxQueue +// .enqueueBlock(block) +// .should.be.revertedWith( +// 'VM Exception while processing transaction: revert Message sender does not have permission to enqueue' +// ) +// }) +// }) +// /* +// * Test dequeueBlock() +// */ +// describe('dequeueBlock() ', async () => { +// it('should allow dequeue from canonicalTransactionChain', async () => { +// const block = ['0x1234'] +// const cumulativePrevElements = 0 +// const blockIndex = 0 +// const localBlock = await enqueueAndGenerateBlock( +// block, +// blockIndex, +// cumulativePrevElements +// ) +// let blocksLength = await l1ToL2TxQueue.getBlocksLength() +// log.debug(`blocksLength before deletion: ${blocksLength}`) +// let front = await l1ToL2TxQueue.front() +// log.debug(`front before deletion: ${front}`) +// let firstBlockHash = await l1ToL2TxQueue.blocks(0) +// log.debug(`firstBlockHash before deletion: ${firstBlockHash}`) - // delete the single appended block - await l1ToL2TxQueue - .connect(canonicalTransactionChain) - .dequeueBeforeInclusive(blockIndex) +// // delete the single appended block +// await l1ToL2TxQueue +// .connect(canonicalTransactionChain) +// .dequeueBeforeInclusive(blockIndex) - blocksLength = await l1ToL2TxQueue.getBlocksLength() - log.debug(`blocksLength after deletion: ${blocksLength}`) - blocksLength.should.equal(1) - firstBlockHash = await l1ToL2TxQueue.blocks(0) - log.debug(`firstBlockHash after deletion: ${firstBlockHash}`) - firstBlockHash.should.equal( - '0x0000000000000000000000000000000000000000000000000000000000000000' - ) - front = await l1ToL2TxQueue.front() - log.debug(`front after deletion: ${front}`) - front.should.equal(1) - }) - it('should not allow dequeue from other address', async () => { - const block = ['0x1234'] - const cumulativePrevElements = 0 - const blockIndex = 0 - const localBlock = await enqueueAndGenerateBlock( - block, - blockIndex, - cumulativePrevElements - ) - await l1ToL2TxQueue - .dequeueBeforeInclusive(blockIndex) - .should.be.revertedWith( - 'VM Exception while processing transaction: revert Message sender does not have permission to dequeue' - ) - }) - }) -}) +// blocksLength = await l1ToL2TxQueue.getBlocksLength() +// log.debug(`blocksLength after deletion: ${blocksLength}`) +// blocksLength.should.equal(1) +// firstBlockHash = await l1ToL2TxQueue.blocks(0) +// log.debug(`firstBlockHash after deletion: ${firstBlockHash}`) +// firstBlockHash.should.equal( +// '0x0000000000000000000000000000000000000000000000000000000000000000' +// ) +// front = await l1ToL2TxQueue.front() +// log.debug(`front after deletion: ${front}`) +// front.should.equal(1) +// }) +// it('should not allow dequeue from other address', async () => { +// const block = ['0x1234'] +// const cumulativePrevElements = 0 +// const blockIndex = 0 +// const localBlock = await enqueueAndGenerateBlock( +// block, +// blockIndex, +// cumulativePrevElements +// ) +// await l1ToL2TxQueue +// .dequeueBeforeInclusive(blockIndex) +// .should.be.revertedWith( +// 'VM Exception while processing transaction: revert Message sender does not have permission to dequeue' +// ) +// }) +// }) +// }) diff --git a/packages/rollup-contracts/test/rollup-list/RLhelper.ts b/packages/rollup-contracts/test/rollup-list/RLhelper.ts index a95b78c24a077..95c14a781d467 100644 --- a/packages/rollup-contracts/test/rollup-list/RLhelper.ts +++ b/packages/rollup-contracts/test/rollup-list/RLhelper.ts @@ -11,7 +11,7 @@ import 
{ newInMemoryDB, SparseMerkleTreeImpl } from '@eth-optimism/core-db' import { utils } from 'ethers' interface BlockHeader { - ethBlockNumber: number + timestamp: number elementsMerkleRoot: string numElementsInBlock: number cumulativePrevElements: number @@ -30,19 +30,19 @@ interface ElementInclusionProof { * as well as the merkle tree which it generates. */ export class DefaultRollupBlock { - public ethBlockNumber: number + public timestamp: number public blockIndex: number //index in public cumulativePrevElements: number //in blockHeader public elements: string[] //Rollup block public elementsMerkleTree: SparseMerkleTreeImpl constructor( - ethBlockNumber: number, // Ethereum block this block was submitted in + timestamp: number, // Ethereum block this block was submitted in blockIndex: number, // index in blocks array (first block has blockIndex of 0) cumulativePrevElements: number, elements: string[] ) { - this.ethBlockNumber = ethBlockNumber + this.timestamp = timestamp this.blockIndex = blockIndex this.cumulativePrevElements = cumulativePrevElements this.elements = elements @@ -95,7 +95,7 @@ export class DefaultRollupBlock { const encoding = abiCoder.encode( ['uint', 'bytes32', 'uint', 'uint'], [ - this.ethBlockNumber, + this.timestamp, bufToHexString(bufferRoot), this.elements.length, this.cumulativePrevElements, @@ -116,7 +116,7 @@ export class DefaultRollupBlock { return { blockIndex: this.blockIndex, blockHeader: { - ethBlockNumber: this.ethBlockNumber, + timestamp: this.timestamp, elementsMerkleRoot: bufToHexString(bufferRoot), numElementsInBlock: this.elements.length, cumulativePrevElements: this.cumulativePrevElements, diff --git a/packages/rollup-contracts/test/rollup-list/RollupList.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupList.spec.ts index 6734b02247f64..6401fe4d618f0 100644 --- a/packages/rollup-contracts/test/rollup-list/RollupList.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/RollupList.spec.ts @@ -15,378 +15,378 @@ import * as RollupList from '../../build/RollupList.json' import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' /* Begin tests */ -describe('RollupList', () => { - const provider = createMockProvider() - const [wallet1, wallet2] = getWallets(provider) - let rollupList - let rollupMerkleUtils - let rollupCtLogFilter +// describe('RollupList', () => { +// const provider = createMockProvider() +// const [wallet1, wallet2] = getWallets(provider) +// let rollupList +// let rollupMerkleUtils +// let rollupCtLogFilter - /* Link libraries before tests */ - before(async () => { - rollupMerkleUtils = await deployContract(wallet1, RollupMerkleUtils, [], { - gasLimit: 6700000, - }) - }) +// /* Link libraries before tests */ +// before(async () => { +// rollupMerkleUtils = await deployContract(wallet1, RollupMerkleUtils, [], { +// gasLimit: 6700000, +// }) +// }) - /* Deploy a new RollupChain before each test */ - beforeEach(async () => { - rollupList = await deployContract( - wallet1, - RollupList, - [rollupMerkleUtils.address], - { - gasLimit: 6700000, - } - ) - rollupCtLogFilter = { - address: rollupList.address, - fromBlock: 0, - toBlock: 'latest', - } - }) +// /* Deploy a new RollupChain before each test */ +// beforeEach(async () => { +// rollupList = await deployContract( +// wallet1, +// RollupList, +// [rollupMerkleUtils.address], +// { +// gasLimit: 6700000, +// } +// ) +// rollupCtLogFilter = { +// address: rollupList.address, +// fromBlock: 0, +// toBlock: 'latest', +// } +// }) - const enqueueAndGenerateBlock = 
async ( - block: string[], - blockIndex: number, - cumulativePrevElements: number - ): Promise => { - // Submit the rollup block on-chain - const enqueueTx = await rollupList.enqueueBlock(block) - const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) - // Generate a local version of the rollup block - const ethBlockNumber = txReceipt.blockNumber - const localBlock = new DefaultRollupBlock( - ethBlockNumber, - blockIndex, - cumulativePrevElements, - block - ) - await localBlock.generateTree() - return localBlock - } - /* - * Test enqueueBlock() - */ - describe('enqueueBlock() ', async () => { - it('should not throw as long as it gets a bytes array (even if its invalid)', async () => { - const block = ['0x1234', '0x1234'] - await rollupList.enqueueBlock(block) // Did not throw... success! - }) +// const enqueueAndGenerateBlock = async ( +// block: string[], +// blockIndex: number, +// cumulativePrevElements: number +// ): Promise => { +// // Submit the rollup block on-chain +// const enqueueTx = await rollupList.enqueueBlock(block) +// const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) +// // Generate a local version of the rollup block +// const ethBlockNumber = txReceipt.blockNumber +// const localBlock = new DefaultRollupBlock( +// ethBlockNumber, +// blockIndex, +// cumulativePrevElements, +// block +// ) +// await localBlock.generateTree() +// return localBlock +// } +// /* +// * Test enqueueBlock() +// */ +// describe('enqueueBlock() ', async () => { +// it('should not throw as long as it gets a bytes array (even if its invalid)', async () => { +// const block = ['0x1234', '0x1234'] +// await rollupList.enqueueBlock(block) // Did not throw... success! +// }) - it('should throw if submitting an empty block', async () => { - const emptyBlock = [] - try { - await rollupList.enqueueBlock(emptyBlock) - } catch (err) { - // Success we threw an error! - return - } - throw new Error('Allowed an empty block to be appended') - }) +// it('should throw if submitting an empty block', async () => { +// const emptyBlock = [] +// try { +// await rollupList.enqueueBlock(emptyBlock) +// } catch (err) { +// // Success we threw an error! 
+// return +// } +// throw new Error('Allowed an empty block to be appended') +// }) - it('should add to blocks array', async () => { - const block = ['0x1234', '0x6578'] - const output = await rollupList.enqueueBlock(block) - log.debug('enqueue block output', JSON.stringify(output)) - const blocksLength = await rollupList.getBlocksLength() - blocksLength.toNumber().should.equal(1) - }) +// it('should add to blocks array', async () => { +// const block = ['0x1234', '0x6578'] +// const output = await rollupList.enqueueBlock(block) +// log.debug('enqueue block output', JSON.stringify(output)) +// const blocksLength = await rollupList.getBlocksLength() +// blocksLength.toNumber().should.equal(1) +// }) - it('should update cumulativeNumElements correctly', async () => { - const block = ['0x1234', '0x5678'] - await rollupList.enqueueBlock(block) - const cumulativeNumElements = await rollupList.cumulativeNumElements.call() - cumulativeNumElements.toNumber().should.equal(2) - }) +// it('should update cumulativeNumElements correctly', async () => { +// const block = ['0x1234', '0x5678'] +// await rollupList.enqueueBlock(block) +// const cumulativeNumElements = await rollupList.cumulativeNumElements.call() +// cumulativeNumElements.toNumber().should.equal(2) +// }) - it('should calculate blockHeaderHash correctly', async () => { - const block = ['0x1234', '0x5678'] - const blockIndex = 0 - const cumulativePrevElements = 0 - const localBlock = await enqueueAndGenerateBlock( - block, - blockIndex, - cumulativePrevElements - ) - //Check blockHeaderHash - const expectedBlockHeaderHash = await localBlock.hashBlockHeader() - const calculatedBlockHeaderHash = await rollupList.blocks(0) - calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) - }) +// it('should calculate blockHeaderHash correctly', async () => { +// const block = ['0x1234', '0x5678'] +// const blockIndex = 0 +// const cumulativePrevElements = 0 +// const localBlock = await enqueueAndGenerateBlock( +// block, +// blockIndex, +// cumulativePrevElements +// ) +// //Check blockHeaderHash +// const expectedBlockHeaderHash = await localBlock.hashBlockHeader() +// const calculatedBlockHeaderHash = await rollupList.blocks(0) +// calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) +// }) - it('should add multiple blocks correctly', async () => { - const block = ['0x1234', '0x5678'] - const numBlocks = 10 - for (let blockIndex = 0; blockIndex < numBlocks; blockIndex++) { - const cumulativePrevElements = block.length * blockIndex - const localBlock = await enqueueAndGenerateBlock( - block, - blockIndex, - cumulativePrevElements - ) - //Check blockHeaderHash - const expectedBlockHeaderHash = await localBlock.hashBlockHeader() - const calculatedBlockHeaderHash = await rollupList.blocks(blockIndex) - calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) - } - //check cumulativeNumElements - const cumulativeNumElements = await rollupList.cumulativeNumElements.call() - cumulativeNumElements.toNumber().should.equal(numBlocks * block.length) - //check blocks length - const blocksLength = await rollupList.getBlocksLength() - blocksLength.toNumber().should.equal(numBlocks) - }) - //TODO test with actual transitions and actual state roots - //TODO test above with multiple blocks with different # elements and different size elements - }) +// it('should add multiple blocks correctly', async () => { +// const block = ['0x1234', '0x5678'] +// const numBlocks = 10 +// for (let blockIndex = 0; blockIndex < numBlocks; blockIndex++) { 
+// const cumulativePrevElements = block.length * blockIndex +// const localBlock = await enqueueAndGenerateBlock( +// block, +// blockIndex, +// cumulativePrevElements +// ) +// //Check blockHeaderHash +// const expectedBlockHeaderHash = await localBlock.hashBlockHeader() +// const calculatedBlockHeaderHash = await rollupList.blocks(blockIndex) +// calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) +// } +// //check cumulativeNumElements +// const cumulativeNumElements = await rollupList.cumulativeNumElements.call() +// cumulativeNumElements.toNumber().should.equal(numBlocks * block.length) +// //check blocks length +// const blocksLength = await rollupList.getBlocksLength() +// blocksLength.toNumber().should.equal(numBlocks) +// }) +// //TODO test with actual transitions and actual state roots +// //TODO test above with multiple blocks with different # elements and different size elements +// }) - /* - * Test verifyElement() - */ - describe('verifyElement() ', async () => { - it('should return true for valid elements for different blockIndexs', async () => { - const maxBlockNumber = 5 - const minBlockNumber = 0 - const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - // Create trees of multiple sizes tree - for ( - let blockIndex = minBlockNumber; - blockIndex < maxBlockNumber + 1; - blockIndex++ - ) { - log.debug(`testing valid proof for block #: ${blockIndex}`) - const cumulativePrevElements = block.length * blockIndex - const localBlock = await enqueueAndGenerateBlock( - block, - blockIndex, - cumulativePrevElements - ) - // Create inclusion proof for the element at elementIndex - const elementIndex = 3 - const element = block[elementIndex] - const position = localBlock.getPosition(elementIndex) - const elementInclusionProof = await localBlock.getElementInclusionProof( - elementIndex - ) - log.debug( - `trying to correctly verify this inclusion proof: ${JSON.stringify( - elementInclusionProof - )}` - ) - //run verifyElement() - // - const isIncluded = await rollupList.verifyElement( - element, - position, - elementInclusionProof - ) - log.debug('isIncluded: ', JSON.stringify(isIncluded)) - isIncluded.should.equal(true) - } - }) +// /* +// * Test verifyElement() +// */ +// describe('verifyElement() ', async () => { +// it('should return true for valid elements for different blockIndexs', async () => { +// const maxBlockNumber = 5 +// const minBlockNumber = 0 +// const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] +// // Create trees of multiple sizes tree +// for ( +// let blockIndex = minBlockNumber; +// blockIndex < maxBlockNumber + 1; +// blockIndex++ +// ) { +// log.debug(`testing valid proof for block #: ${blockIndex}`) +// const cumulativePrevElements = block.length * blockIndex +// const localBlock = await enqueueAndGenerateBlock( +// block, +// blockIndex, +// cumulativePrevElements +// ) +// // Create inclusion proof for the element at elementIndex +// const elementIndex = 3 +// const element = block[elementIndex] +// const position = localBlock.getPosition(elementIndex) +// const elementInclusionProof = await localBlock.getElementInclusionProof( +// elementIndex +// ) +// log.debug( +// `trying to correctly verify this inclusion proof: ${JSON.stringify( +// elementInclusionProof +// )}` +// ) +// //run verifyElement() +// // +// const isIncluded = await rollupList.verifyElement( +// element, +// position, +// elementInclusionProof +// ) +// log.debug('isIncluded: ', JSON.stringify(isIncluded)) +// 
isIncluded.should.equal(true) +// } +// }) - it('should return false for wrong position with wrong indexInBlock', async () => { - const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - const cumulativePrevElements = 0 - const blockIndex = 0 - const localBlock = await enqueueAndGenerateBlock( - block, - blockIndex, - cumulativePrevElements - ) - const elementIndex = 1 - const element = block[elementIndex] - const position = localBlock.getPosition(elementIndex) - const elementInclusionProof = await localBlock.getElementInclusionProof( - elementIndex - ) - log.debug( - `trying to falsely verify this inclusion proof: ${JSON.stringify( - elementInclusionProof - )}` - ) - //Give wrong position so inclusion proof is wrong - const wrongPosition = position + 1 - //run verifyElement() - // - const isIncluded = await rollupList.verifyElement( - element, - wrongPosition, - elementInclusionProof - ) - log.debug('isIncluded: ', JSON.stringify(isIncluded)) - isIncluded.should.equal(false) - }) +// it('should return false for wrong position with wrong indexInBlock', async () => { +// const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] +// const cumulativePrevElements = 0 +// const blockIndex = 0 +// const localBlock = await enqueueAndGenerateBlock( +// block, +// blockIndex, +// cumulativePrevElements +// ) +// const elementIndex = 1 +// const element = block[elementIndex] +// const position = localBlock.getPosition(elementIndex) +// const elementInclusionProof = await localBlock.getElementInclusionProof( +// elementIndex +// ) +// log.debug( +// `trying to falsely verify this inclusion proof: ${JSON.stringify( +// elementInclusionProof +// )}` +// ) +// //Give wrong position so inclusion proof is wrong +// const wrongPosition = position + 1 +// //run verifyElement() +// // +// const isIncluded = await rollupList.verifyElement( +// element, +// wrongPosition, +// elementInclusionProof +// ) +// log.debug('isIncluded: ', JSON.stringify(isIncluded)) +// isIncluded.should.equal(false) +// }) - it('should return false for wrong position and matching indexInBlock', async () => { - const block = ['0x1234', '0x4567', '0x890a', '0xabcd'] - const cumulativePrevElements = 0 - const blockIndex = 0 - const localBlock = await enqueueAndGenerateBlock( - block, - blockIndex, - cumulativePrevElements - ) - //generate inclusion proof - const elementIndex = 1 - const element = block[elementIndex] - const position = localBlock.getPosition(elementIndex) - const elementInclusionProof = await localBlock.getElementInclusionProof( - elementIndex - ) - //Give wrong position so inclusion proof is wrong - const wrongPosition = position + 1 - //Change index to also be false (so position = index + cumulative) - elementInclusionProof.indexInBlock++ - log.debug( - `trying to falsely verify this inclusion proof: ${JSON.stringify( - elementInclusionProof - )}` - ) - //run verifyElement() - // - const isIncluded = await rollupList.verifyElement( - element, - wrongPosition, - elementInclusionProof - ) - log.debug('isIncluded: ', JSON.stringify(isIncluded)) - isIncluded.should.equal(false) - }) - }) +// it('should return false for wrong position and matching indexInBlock', async () => { +// const block = ['0x1234', '0x4567', '0x890a', '0xabcd'] +// const cumulativePrevElements = 0 +// const blockIndex = 0 +// const localBlock = await enqueueAndGenerateBlock( +// block, +// blockIndex, +// cumulativePrevElements +// ) +// //generate inclusion proof +// const elementIndex = 1 +// const element = 
block[elementIndex] +// const position = localBlock.getPosition(elementIndex) +// const elementInclusionProof = await localBlock.getElementInclusionProof( +// elementIndex +// ) +// //Give wrong position so inclusion proof is wrong +// const wrongPosition = position + 1 +// //Change index to also be false (so position = index + cumulative) +// elementInclusionProof.indexInBlock++ +// log.debug( +// `trying to falsely verify this inclusion proof: ${JSON.stringify( +// elementInclusionProof +// )}` +// ) +// //run verifyElement() +// // +// const isIncluded = await rollupList.verifyElement( +// element, +// wrongPosition, +// elementInclusionProof +// ) +// log.debug('isIncluded: ', JSON.stringify(isIncluded)) +// isIncluded.should.equal(false) +// }) +// }) - /* - * Test deleteAfterInclusive() - */ - describe('deleteAfterInclusive() ', async () => { - it('should delete single block', async () => { - const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - const cumulativePrevElements = 0 - const blockIndex = 0 - const localBlock = await enqueueAndGenerateBlock( - block, - blockIndex, - cumulativePrevElements - ) - const blockHeader = { - ethBlockNumber: localBlock.ethBlockNumber, - elementsMerkleRoot: await localBlock.elementsMerkleTree.getRootHash(), - numElementsInBlock: block.length, - cumulativePrevElements, - } - // Submit the rollup block on-chain - let blocksLength = await rollupList.getBlocksLength() - log.debug(`blocksLength before deletion: ${blocksLength}`) - await rollupList.deleteAfterInclusive( - blockIndex, // delete the single appended block - blockHeader - ) - blocksLength = await rollupList.getBlocksLength() - log.debug(`blocksLength after deletion: ${blocksLength}`) - blocksLength.should.equal(0) - }) +// /* +// * Test deleteAfterInclusive() +// */ +// describe('deleteAfterInclusive() ', async () => { +// it('should delete single block', async () => { +// const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] +// const cumulativePrevElements = 0 +// const blockIndex = 0 +// const localBlock = await enqueueAndGenerateBlock( +// block, +// blockIndex, +// cumulativePrevElements +// ) +// const blockHeader = { +// ethBlockNumber: localBlock.ethBlockNumber, +// elementsMerkleRoot: await localBlock.elementsMerkleTree.getRootHash(), +// numElementsInBlock: block.length, +// cumulativePrevElements, +// } +// // Submit the rollup block on-chain +// let blocksLength = await rollupList.getBlocksLength() +// log.debug(`blocksLength before deletion: ${blocksLength}`) +// await rollupList.deleteAfterInclusive( +// blockIndex, // delete the single appended block +// blockHeader +// ) +// blocksLength = await rollupList.getBlocksLength() +// log.debug(`blocksLength after deletion: ${blocksLength}`) +// blocksLength.should.equal(0) +// }) - it('should delete many blocks', async () => { - const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - const localBlocks = [] - for (let blockIndex = 0; blockIndex < 5; blockIndex++) { - const cumulativePrevElements = blockIndex * block.length - const localBlock = await enqueueAndGenerateBlock( - block, - blockIndex, - cumulativePrevElements - ) - localBlocks.push(localBlock) - } - const deleteBlockNumber = 0 - const deleteBlock = localBlocks[deleteBlockNumber] - const blockHeader = { - ethBlockNumber: deleteBlock.ethBlockNumber, - elementsMerkleRoot: deleteBlock.elementsMerkleTree.getRootHash(), - numElementsInBlock: block.length, - cumulativePrevElements: 
deleteBlock.cumulativePrevElements, - } - let blocksLength = await rollupList.getBlocksLength() - log.debug(`blocksLength before deletion: ${blocksLength}`) - await rollupList.deleteAfterInclusive( - deleteBlockNumber, // delete all blocks (including and after block 0) - blockHeader - ) - blocksLength = await rollupList.getBlocksLength() - log.debug(`blocksLength after deletion: ${blocksLength}`) - blocksLength.should.equal(0) - }) - }) +// it('should delete many blocks', async () => { +// const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] +// const localBlocks = [] +// for (let blockIndex = 0; blockIndex < 5; blockIndex++) { +// const cumulativePrevElements = blockIndex * block.length +// const localBlock = await enqueueAndGenerateBlock( +// block, +// blockIndex, +// cumulativePrevElements +// ) +// localBlocks.push(localBlock) +// } +// const deleteBlockNumber = 0 +// const deleteBlock = localBlocks[deleteBlockNumber] +// const blockHeader = { +// ethBlockNumber: deleteBlock.ethBlockNumber, +// elementsMerkleRoot: deleteBlock.elementsMerkleTree.getRootHash(), +// numElementsInBlock: block.length, +// cumulativePrevElements: deleteBlock.cumulativePrevElements, +// } +// let blocksLength = await rollupList.getBlocksLength() +// log.debug(`blocksLength before deletion: ${blocksLength}`) +// await rollupList.deleteAfterInclusive( +// deleteBlockNumber, // delete all blocks (including and after block 0) +// blockHeader +// ) +// blocksLength = await rollupList.getBlocksLength() +// log.debug(`blocksLength after deletion: ${blocksLength}`) +// blocksLength.should.equal(0) +// }) +// }) - describe('dequeueBeforeInclusive()', async () => { - it('should dequeue single block', async () => { - const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - const cumulativePrevElements = 0 - const blockIndex = 0 - const localBlock = await enqueueAndGenerateBlock( - block, - blockIndex, - cumulativePrevElements - ) - let blocksLength = await rollupList.getBlocksLength() - log.debug(`blocksLength before deletion: ${blocksLength}`) - let front = await rollupList.front() - log.debug(`front before deletion: ${front}`) - let firstBlockHash = await rollupList.blocks(0) - log.debug(`firstBlockHash before deletion: ${firstBlockHash}`) +// describe('dequeueBeforeInclusive()', async () => { +// it('should dequeue single block', async () => { +// const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] +// const cumulativePrevElements = 0 +// const blockIndex = 0 +// const localBlock = await enqueueAndGenerateBlock( +// block, +// blockIndex, +// cumulativePrevElements +// ) +// let blocksLength = await rollupList.getBlocksLength() +// log.debug(`blocksLength before deletion: ${blocksLength}`) +// let front = await rollupList.front() +// log.debug(`front before deletion: ${front}`) +// let firstBlockHash = await rollupList.blocks(0) +// log.debug(`firstBlockHash before deletion: ${firstBlockHash}`) - // delete the single appended block - await rollupList.dequeueBeforeInclusive(blockIndex) +// // delete the single appended block +// await rollupList.dequeueBeforeInclusive(blockIndex) - blocksLength = await rollupList.getBlocksLength() - log.debug(`blocksLength after deletion: ${blocksLength}`) - blocksLength.should.equal(1) - firstBlockHash = await rollupList.blocks(0) - log.debug(`firstBlockHash after deletion: ${firstBlockHash}`) - firstBlockHash.should.equal( - '0x0000000000000000000000000000000000000000000000000000000000000000' - ) - front = await 
rollupList.front() - log.debug(`front after deletion: ${front}`) - front.should.equal(1) - }) +// blocksLength = await rollupList.getBlocksLength() +// log.debug(`blocksLength after deletion: ${blocksLength}`) +// blocksLength.should.equal(1) +// firstBlockHash = await rollupList.blocks(0) +// log.debug(`firstBlockHash after deletion: ${firstBlockHash}`) +// firstBlockHash.should.equal( +// '0x0000000000000000000000000000000000000000000000000000000000000000' +// ) +// front = await rollupList.front() +// log.debug(`front after deletion: ${front}`) +// front.should.equal(1) +// }) - it('should dequeue many blocks', async () => { - const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - const localBlocks = [] - const numBlocks = 5 - for (let blockIndex = 0; blockIndex < numBlocks; blockIndex++) { - const cumulativePrevElements = block.length * blockIndex - const localBlock = await enqueueAndGenerateBlock( - block, - blockIndex, - cumulativePrevElements - ) - localBlocks.push(localBlock) - } - let blocksLength = await rollupList.getBlocksLength() - log.debug(`blocksLength before deletion: ${blocksLength}`) - let front = await rollupList.front() - log.debug(`front before deletion: ${front}`) - for (let i = 0; i < numBlocks; i++) { - const ithBlockHash = await rollupList.blocks(i) - log.debug(`blockHash #${i} before deletion: ${ithBlockHash}`) - } - await rollupList.dequeueBeforeInclusive(numBlocks - 1) - blocksLength = await rollupList.getBlocksLength() - log.debug(`blocksLength after deletion: ${blocksLength}`) - blocksLength.should.equal(numBlocks) - front = await rollupList.front() - log.debug(`front after deletion: ${front}`) - front.should.equal(numBlocks) - for (let i = 0; i < numBlocks; i++) { - const ithBlockHash = await rollupList.blocks(i) - log.debug(`blockHash #${i} after deletion: ${ithBlockHash}`) - ithBlockHash.should.equal( - '0x0000000000000000000000000000000000000000000000000000000000000000' - ) - } - }) - }) -}) +// it('should dequeue many blocks', async () => { +// const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] +// const localBlocks = [] +// const numBlocks = 5 +// for (let blockIndex = 0; blockIndex < numBlocks; blockIndex++) { +// const cumulativePrevElements = block.length * blockIndex +// const localBlock = await enqueueAndGenerateBlock( +// block, +// blockIndex, +// cumulativePrevElements +// ) +// localBlocks.push(localBlock) +// } +// let blocksLength = await rollupList.getBlocksLength() +// log.debug(`blocksLength before deletion: ${blocksLength}`) +// let front = await rollupList.front() +// log.debug(`front before deletion: ${front}`) +// for (let i = 0; i < numBlocks; i++) { +// const ithBlockHash = await rollupList.blocks(i) +// log.debug(`blockHash #${i} before deletion: ${ithBlockHash}`) +// } +// await rollupList.dequeueBeforeInclusive(numBlocks - 1) +// blocksLength = await rollupList.getBlocksLength() +// log.debug(`blocksLength after deletion: ${blocksLength}`) +// blocksLength.should.equal(numBlocks) +// front = await rollupList.front() +// log.debug(`front after deletion: ${front}`) +// front.should.equal(numBlocks) +// for (let i = 0; i < numBlocks; i++) { +// const ithBlockHash = await rollupList.blocks(i) +// log.debug(`blockHash #${i} after deletion: ${ithBlockHash}`) +// ithBlockHash.should.equal( +// '0x0000000000000000000000000000000000000000000000000000000000000000' +// ) +// } +// }) +// }) +// }) diff --git a/packages/rollup-contracts/test/rollup-list/RollupTransactionQueue.spec.ts 
b/packages/rollup-contracts/test/rollup-list/RollupTransactionQueue.spec.ts index d82cabd7fd55f..f0b8e15b48642 100644 --- a/packages/rollup-contracts/test/rollup-list/RollupTransactionQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/RollupTransactionQueue.spec.ts @@ -15,123 +15,123 @@ import * as RollupTransactionQueue from '../../build/RollupTransactionQueue.json import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' /* Begin tests */ -describe('RollupTransactionQueue', () => { - const provider = createMockProvider() - const [wallet, sequencer, canonicalTransactionChain] = getWallets(provider) - let rollupTxQueue - let rollupMerkleUtils +// describe('RollupTransactionQueue', () => { +// const provider = createMockProvider() +// const [wallet, sequencer, canonicalTransactionChain] = getWallets(provider) +// let rollupTxQueue +// let rollupMerkleUtils - /* Link libraries before tests */ - before(async () => { - rollupMerkleUtils = await deployContract(wallet, RollupMerkleUtils, [], { - gasLimit: 6700000, - }) - }) +// /* Link libraries before tests */ +// before(async () => { +// rollupMerkleUtils = await deployContract(wallet, RollupMerkleUtils, [], { +// gasLimit: 6700000, +// }) +// }) - /* Deploy a new RollupChain before each test */ - beforeEach(async () => { - rollupTxQueue = await deployContract( - wallet, - RollupTransactionQueue, - [ - rollupMerkleUtils.address, - sequencer.address, - canonicalTransactionChain.address, - ], - { - gasLimit: 6700000, - } - ) - }) +// /* Deploy a new RollupChain before each test */ +// beforeEach(async () => { +// rollupTxQueue = await deployContract( +// wallet, +// RollupTransactionQueue, +// [ +// rollupMerkleUtils.address, +// sequencer.address, +// canonicalTransactionChain.address, +// ], +// { +// gasLimit: 6700000, +// } +// ) +// }) - const enqueueAndGenerateBlock = async ( - block: string[], - blockIndex: number, - cumulativePrevElements: number - ): Promise => { - // Submit the rollup block on-chain - const enqueueTx = await rollupTxQueue.connect(sequencer).enqueueBlock(block) - const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) - // Generate a local version of the rollup block - const ethBlockNumber = txReceipt.blockNumber - const localBlock = new DefaultRollupBlock( - ethBlockNumber, - blockIndex, - cumulativePrevElements, - block - ) - await localBlock.generateTree() - return localBlock - } +// const enqueueAndGenerateBlock = async ( +// block: string[], +// blockIndex: number, +// cumulativePrevElements: number +// ): Promise => { +// // Submit the rollup block on-chain +// const enqueueTx = await rollupTxQueue.connect(sequencer).enqueueBlock(block) +// const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) +// // Generate a local version of the rollup block +// const ethBlockNumber = txReceipt.blockNumber +// const localBlock = new DefaultRollupBlock( +// ethBlockNumber, +// blockIndex, +// cumulativePrevElements, +// block +// ) +// await localBlock.generateTree() +// return localBlock +// } - /* - * Test enqueueBlock() - */ - describe('enqueueBlock() ', async () => { - it('should allow enqueue from sequencer', async () => { - const block = ['0x1234'] - await rollupTxQueue.connect(sequencer).enqueueBlock(block) // Did not throw... success! 
- }) - it('should not allow enqueue from other address', async () => { - const block = ['0x1234'] - await rollupTxQueue - .enqueueBlock(block) - .should.be.revertedWith( - 'VM Exception while processing transaction: revert Message sender does not have permission to enqueue' - ) - }) - }) - /* - * Test dequeueBlock() - */ - describe('dequeueBlock() ', async () => { - it('should allow dequeue from canonicalTransactionChain', async () => { - const block = ['0x1234'] - const cumulativePrevElements = 0 - const blockIndex = 0 - const localBlock = await enqueueAndGenerateBlock( - block, - blockIndex, - cumulativePrevElements - ) - let blocksLength = await rollupTxQueue.getBlocksLength() - log.debug(`blocksLength before deletion: ${blocksLength}`) - let front = await rollupTxQueue.front() - log.debug(`front before deletion: ${front}`) - let firstBlockHash = await rollupTxQueue.blocks(0) - log.debug(`firstBlockHash before deletion: ${firstBlockHash}`) +// /* +// * Test enqueueBlock() +// */ +// describe('enqueueBlock() ', async () => { +// it('should allow enqueue from sequencer', async () => { +// const block = ['0x1234'] +// await rollupTxQueue.connect(sequencer).enqueueBlock(block) // Did not throw... success! +// }) +// it('should not allow enqueue from other address', async () => { +// const block = ['0x1234'] +// await rollupTxQueue +// .enqueueBlock(block) +// .should.be.revertedWith( +// 'VM Exception while processing transaction: revert Message sender does not have permission to enqueue' +// ) +// }) +// }) +// /* +// * Test dequeueBlock() +// */ +// describe('dequeueBlock() ', async () => { +// it('should allow dequeue from canonicalTransactionChain', async () => { +// const block = ['0x1234'] +// const cumulativePrevElements = 0 +// const blockIndex = 0 +// const localBlock = await enqueueAndGenerateBlock( +// block, +// blockIndex, +// cumulativePrevElements +// ) +// let blocksLength = await rollupTxQueue.getBlocksLength() +// log.debug(`blocksLength before deletion: ${blocksLength}`) +// let front = await rollupTxQueue.front() +// log.debug(`front before deletion: ${front}`) +// let firstBlockHash = await rollupTxQueue.blocks(0) +// log.debug(`firstBlockHash before deletion: ${firstBlockHash}`) - // delete the single appended block - await rollupTxQueue - .connect(canonicalTransactionChain) - .dequeueBeforeInclusive(blockIndex) +// // delete the single appended block +// await rollupTxQueue +// .connect(canonicalTransactionChain) +// .dequeueBeforeInclusive(blockIndex) - blocksLength = await rollupTxQueue.getBlocksLength() - log.debug(`blocksLength after deletion: ${blocksLength}`) - blocksLength.should.equal(1) - firstBlockHash = await rollupTxQueue.blocks(0) - log.debug(`firstBlockHash after deletion: ${firstBlockHash}`) - firstBlockHash.should.equal( - '0x0000000000000000000000000000000000000000000000000000000000000000' - ) - front = await rollupTxQueue.front() - log.debug(`front after deletion: ${front}`) - front.should.equal(1) - }) - it('should not allow dequeue from other address', async () => { - const block = ['0x1234'] - const cumulativePrevElements = 0 - const blockIndex = 0 - const localBlock = await enqueueAndGenerateBlock( - block, - blockIndex, - cumulativePrevElements - ) - await rollupTxQueue - .dequeueBeforeInclusive(blockIndex) - .should.be.revertedWith( - 'VM Exception while processing transaction: revert Message sender does not have permission to dequeue' - ) - }) - }) -}) +// blocksLength = await rollupTxQueue.getBlocksLength() +// log.debug(`blocksLength after 
deletion: ${blocksLength}`) +// blocksLength.should.equal(1) +// firstBlockHash = await rollupTxQueue.blocks(0) +// log.debug(`firstBlockHash after deletion: ${firstBlockHash}`) +// firstBlockHash.should.equal( +// '0x0000000000000000000000000000000000000000000000000000000000000000' +// ) +// front = await rollupTxQueue.front() +// log.debug(`front after deletion: ${front}`) +// front.should.equal(1) +// }) +// it('should not allow dequeue from other address', async () => { +// const block = ['0x1234'] +// const cumulativePrevElements = 0 +// const blockIndex = 0 +// const localBlock = await enqueueAndGenerateBlock( +// block, +// blockIndex, +// cumulativePrevElements +// ) +// await rollupTxQueue +// .dequeueBeforeInclusive(blockIndex) +// .should.be.revertedWith( +// 'VM Exception while processing transaction: revert Message sender does not have permission to dequeue' +// ) +// }) +// }) +// }) From 57443a789fe773ec4c3aeddc822d5676a9cc6461 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Sun, 17 May 2020 19:56:30 -0400 Subject: [PATCH 04/37] add isL1ToL2Tx, fix hashBlockHeader --- .../contracts/CanonicalTransactionChain.sol | 2 +- .../rollup-list/CanonicalTransactionChain.spec.ts | 1 + .../rollup-contracts/test/rollup-list/RLhelper.ts | 12 ++++++++---- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index 805e53a16c7bb..cb2ce5fe2de8e 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -37,8 +37,8 @@ contract CanonicalTransactionChain is RollupList { // calculate block header bytes32 blockHeaderHash = keccak256(abi.encodePacked( - _timestamp, //timestamp, duh _timestamp, + false, // isL1ToL2Tx merkleUtils.getMerkleRoot(_txBatch), // elementsMerkleRoot _txBatch.length, // numElementsInBlock cumulativeNumElements // cumulativeNumElements diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index fc13c29b72165..7b8808414d759 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -57,6 +57,7 @@ describe.only('CanonicalTransactionChain', () => { // Generate a local version of the rollup block const localBlock = new DefaultRollupBlock( timestamp, + false, blockIndex, cumulativePrevElements, block diff --git a/packages/rollup-contracts/test/rollup-list/RLhelper.ts b/packages/rollup-contracts/test/rollup-list/RLhelper.ts index 95c14a781d467..ba5c034f7022d 100644 --- a/packages/rollup-contracts/test/rollup-list/RLhelper.ts +++ b/packages/rollup-contracts/test/rollup-list/RLhelper.ts @@ -12,6 +12,7 @@ import { utils } from 'ethers' interface BlockHeader { timestamp: number + isL1ToL2Tx: boolean elementsMerkleRoot: string numElementsInBlock: number cumulativePrevElements: number @@ -31,6 +32,7 @@ interface ElementInclusionProof { */ export class DefaultRollupBlock { public timestamp: number + public isL1ToL2Tx: boolean public blockIndex: number //index in public cumulativePrevElements: number //in blockHeader public elements: string[] //Rollup block @@ -38,10 +40,12 @@ export class DefaultRollupBlock { constructor( timestamp: number, // Ethereum block this block was submitted in + isL1ToL2Tx: boolean, blockIndex: number, 
// index in blocks array (first block has blockIndex of 0) cumulativePrevElements: number, elements: string[] ) { + this.isL1ToL2Tx = isL1ToL2Tx this.timestamp = timestamp this.blockIndex = blockIndex this.cumulativePrevElements = cumulativePrevElements @@ -91,17 +95,16 @@ export class DefaultRollupBlock { public async hashBlockHeader(): Promise { const bufferRoot = await this.elementsMerkleTree.getRootHash() - const abiCoder = new utils.AbiCoder() - const encoding = abiCoder.encode( - ['uint', 'bytes32', 'uint', 'uint'], + return utils.solidityKeccak256( + ['uint', 'bool', 'bytes32', 'uint', 'uint'], [ this.timestamp, + this.isL1ToL2Tx, bufToHexString(bufferRoot), this.elements.length, this.cumulativePrevElements, ] ) - return bufToHexString(Buffer.from(keccak256(encoding), 'hex')) } /* @@ -117,6 +120,7 @@ export class DefaultRollupBlock { blockIndex: this.blockIndex, blockHeader: { timestamp: this.timestamp, + isL1ToL2Tx: this.isL1ToL2Tx, elementsMerkleRoot: bufToHexString(bufferRoot), numElementsInBlock: this.elements.length, cumulativePrevElements: this.cumulativePrevElements, From 7c7bd14c8301736dcb1db25e3525478b06b838ec Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Mon, 18 May 2020 02:28:52 -0400 Subject: [PATCH 05/37] rollupList -> RollupQueue, fix canonicalTransactionChain --- .../contracts/CanonicalTransactionChain.sol | 74 +++- .../rollup-contracts/contracts/DataTypes.sol | 3 +- .../contracts/L1ToL2TransactionQueue.sol | 9 +- .../rollup-contracts/contracts/RollupList.sol | 121 ------ .../contracts/RollupQueue.sol | 67 +++ .../contracts/RollupTransactionQueue.sol | 27 -- .../CanonicalTransactionChain.spec.ts | 200 ++++++++- .../L1ToL2TransactionQueue.spec.ts | 212 ++++------ .../test/rollup-list/RollupList.spec.ts | 392 ------------------ .../test/rollup-list/RollupQueue.spec.ts | 392 ++++++++++++++++++ .../RollupTransactionQueue.spec.ts | 137 ------ 11 files changed, 802 insertions(+), 832 deletions(-) delete mode 100644 packages/rollup-contracts/contracts/RollupList.sol create mode 100644 packages/rollup-contracts/contracts/RollupQueue.sol delete mode 100644 packages/rollup-contracts/contracts/RollupTransactionQueue.sol delete mode 100644 packages/rollup-contracts/test/rollup-list/RollupList.spec.ts create mode 100644 packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts delete mode 100644 packages/rollup-contracts/test/rollup-list/RollupTransactionQueue.spec.ts diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index cb2ce5fe2de8e..d8e13c0b3efdb 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -2,19 +2,43 @@ pragma solidity ^0.5.0; pragma experimental ABIEncoderV2; /* Internal Imports */ -import {RollupList} from "./RollupList.sol"; +import {DataTypes as dt} from "./DataTypes.sol"; +import {RollupMerkleUtils} from "./RollupMerkleUtils.sol"; -contract CanonicalTransactionChain is RollupList { +contract CanonicalTransactionChain { + // The Rollup Merkle Tree library (currently a contract for ease of testing) + RollupMerkleUtils merkleUtils; address public sequencer; - address public canonicalTransactionChain; + + // How many elements in total have been appended + uint public cumulativeNumElements; + // List of block header hashes + bytes32[] public blocks; + constructor( address _rollupMerkleUtilsAddress, - address _sequencer, - address _canonicalTransactionChain - ) 
RollupList(_rollupMerkleUtilsAddress) public { + address _sequencer + ) public { + merkleUtils = RollupMerkleUtils(_rollupMerkleUtilsAddress); sequencer = _sequencer; - canonicalTransactionChain = _canonicalTransactionChain; + } + + // for testing: returns length of block list + function getBlocksLength() public view returns (uint) { + return blocks.length; + } + + function hashBlockHeader( + dt.BlockHeader memory _blockHeader + ) public pure returns (bytes32) { + return keccak256(abi.encodePacked( + _blockHeader.timestamp, + _blockHeader.isL1ToL2Tx, + _blockHeader.elementsMerkleRoot, + _blockHeader.numElementsInBlock, + _blockHeader.cumulativePrevElements + )); } function authenticateEnqueue(address _sender) public view returns (bool) { @@ -34,7 +58,6 @@ contract CanonicalTransactionChain is RollupList { // require dist(_timestamp, block.timestamp) < sequencerLivenessAssumption // require(L1ToL2Queue.ageOfOldestQueuedBlock() < sequencerLivenessAssumption, "must process all L1->L2 blocks older than liveness assumption before processing L2 blocks.") - // calculate block header bytes32 blockHeaderHash = keccak256(abi.encodePacked( _timestamp, @@ -47,20 +70,29 @@ contract CanonicalTransactionChain is RollupList { blocks.push(blockHeaderHash); // update cumulative elements cumulativeNumElements += _txBatch.length; + } + // verifies an element is in the current list at the given position + function verifyElement( + bytes memory _element, // the element of the list being proven + uint _position, // the position in the list of the element being proven + dt.ElementInclusionProof memory _inclusionProof // inclusion proof in the rollup block + ) public view returns (bool) { + // For convenience, store the blockHeader + dt.BlockHeader memory blockHeader = _inclusionProof.blockHeader; + // make sure absolute position equivalent to relative positions + if(_position != _inclusionProof.indexInBlock + + blockHeader.cumulativePrevElements) + return false; - - // // calculate block header - // bytes32 blockHeaderHash = keccak256(abi.encodePacked( - // _timestamp, //timestamp, duh - // false, //isL1ToL2Tx - // merkleUtils.getMerkleRoot(_txBatch), // elementsMerkleRoot - // _txBatch.length, // numElementsInBlock - // cumulativeNumElements // cumulativePrevElements - // )); - // // store block header - // blocks.push(blockHeaderHash); - // // update cumulative elements - // cumulativeNumElements += _txBatch.length; + // verify elementsMerkleRoot + if (!merkleUtils.verify( + blockHeader.elementsMerkleRoot, + _element, + _inclusionProof.indexInBlock, + _inclusionProof.siblings + )) return false; + //compare computed block header with the block header in the list. 
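+    // (position consistency and Merkle membership were checked above; this
+    // final comparison ties the supplied header to the header hash stored
+    // on-chain at blockIndex, so all three checks must pass for inclusion.)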
+ return hashBlockHeader(blockHeader) == blocks[_inclusionProof.blockIndex]; } } diff --git a/packages/rollup-contracts/contracts/DataTypes.sol b/packages/rollup-contracts/contracts/DataTypes.sol index be2c089f633d1..3a5d12916604f 100644 --- a/packages/rollup-contracts/contracts/DataTypes.sol +++ b/packages/rollup-contracts/contracts/DataTypes.sol @@ -19,7 +19,8 @@ contract DataTypes { } struct BlockHeader { - uint ethBlockNumber; + uint timestamp; + bool isL1ToL2Tx; bytes32 elementsMerkleRoot; uint numElementsInBlock; uint cumulativePrevElements; diff --git a/packages/rollup-contracts/contracts/L1ToL2TransactionQueue.sol b/packages/rollup-contracts/contracts/L1ToL2TransactionQueue.sol index f9e8b24dd4b9d..7bef578613857 100644 --- a/packages/rollup-contracts/contracts/L1ToL2TransactionQueue.sol +++ b/packages/rollup-contracts/contracts/L1ToL2TransactionQueue.sol @@ -2,9 +2,9 @@ pragma solidity ^0.5.0; pragma experimental ABIEncoderV2; /* Internal Imports */ -import {RollupList} from "./RollupList.sol"; +import {RollupQueue} from "./RollupQueue.sol"; -contract L1ToL2TransactionQueue is RollupList { +contract L1ToL2TransactionQueue is RollupQueue { address public l1ToL2TransactionPasser; address public canonicalTransactionChain; @@ -12,16 +12,15 @@ contract L1ToL2TransactionQueue is RollupList { address _rollupMerkleUtilsAddress, address _l1ToL2TransactionPasser, address _canonicalTransactionChain - ) RollupList(_rollupMerkleUtilsAddress) public { + ) RollupQueue(_rollupMerkleUtilsAddress) public { l1ToL2TransactionPasser = _l1ToL2TransactionPasser; canonicalTransactionChain = _canonicalTransactionChain; } - + function authenticateEnqueue(address _sender) public view returns (bool) { return _sender == l1ToL2TransactionPasser; } function authenticateDequeue(address _sender) public view returns (bool) { return _sender == canonicalTransactionChain; } - function authenticateDelete(address _sender) public view returns (bool) { return false; } } diff --git a/packages/rollup-contracts/contracts/RollupList.sol b/packages/rollup-contracts/contracts/RollupList.sol deleted file mode 100644 index 391375d40a5f8..0000000000000 --- a/packages/rollup-contracts/contracts/RollupList.sol +++ /dev/null @@ -1,121 +0,0 @@ -pragma solidity ^0.5.0; -pragma experimental ABIEncoderV2; - -/* Internal Imports */ -import {DataTypes as dt} from "./DataTypes.sol"; -import {RollupMerkleUtils} from "./RollupMerkleUtils.sol"; - -contract RollupList { - // How many elements in total have been appended - uint public cumulativeNumElements; - - // List of block header hashes - bytes32[] public blocks; - - uint256 public front; //Index of the first blockHeaderHash in the list - - // The Rollup Merkle Tree library (currently a contract for ease of testing) - RollupMerkleUtils merkleUtils; - - /*************** - * Constructor * - **************/ - constructor(address _rollupMerkleUtilsAddress) public { - merkleUtils = RollupMerkleUtils(_rollupMerkleUtilsAddress); - front = 0; - } - // for testing: returns length of block list - function getBlocksLength() public view returns (uint) { - return blocks.length; - } - - function hashBlockHeader( - dt.BlockHeader memory _blockHeader - ) public pure returns (bytes32) { - return keccak256(abi.encodePacked( - _blockHeader.ethBlockNumber, - _blockHeader.elementsMerkleRoot, - _blockHeader.numElementsInBlock, - _blockHeader.cumulativePrevElements - )); - } - - function authenticateEnqueue(address _sender) public view returns (bool) { return true; } - function authenticateDelete(address _sender) 
public view returns (bool) { return true; } - function authenticateDequeue(address _sender) public view returns (bool) { return true; } - - // appends to the current list of blocks - function enqueueBlock(bytes[] memory _rollupBlock) public { - //Check that msg.sender is authorized to append - require(authenticateEnqueue(msg.sender), "Message sender does not have permission to enqueue"); - require(_rollupBlock.length > 0, "Cannot submit an empty block"); - // calculate block header - bytes32 blockHeaderHash = keccak256(abi.encodePacked( - block.number, // ethBlockNumber - merkleUtils.getMerkleRoot(_rollupBlock), // elementsMerkleRoot - _rollupBlock.length, // numElementsInBlock - cumulativeNumElements // cumulativeNumElements - )); - // store block header - blocks.push(blockHeaderHash); - // update cumulative elements - cumulativeNumElements += _rollupBlock.length; - } - - // verifies an element is in the current list at the given position - function verifyElement( - bytes memory _element, // the element of the list being proven - uint _position, // the position in the list of the element being proven - dt.ElementInclusionProof memory _inclusionProof // inclusion proof in the rollup block - ) public view returns (bool) { - // For convenience, store the blockHeader - dt.BlockHeader memory blockHeader = _inclusionProof.blockHeader; - // make sure absolute position equivalent to relative positions - if(_position != _inclusionProof.indexInBlock + - blockHeader.cumulativePrevElements) - return false; - - // verify elementsMerkleRoot - if (!merkleUtils.verify( - blockHeader.elementsMerkleRoot, - _element, - _inclusionProof.indexInBlock, - _inclusionProof.siblings - )) return false; - //compare computed block header with the block header in the list. - return hashBlockHeader(blockHeader) == blocks[_inclusionProof.blockIndex]; - } - - // deletes all blocks including and after the given block number - // TODO: rename to popAfterInclusive? - function deleteAfterInclusive( - uint _blockIndex, // delete this block index and those following - dt.BlockHeader memory _blockHeader - ) public { - //Check that msg.sender is authorized to delete - require(authenticateDelete(msg.sender), "Message sender does not have permission to delete blocks"); - //blockIndex is between first and last blocks - require(_blockIndex >= front && _blockIndex < blocks.length, "Cannot delete blocks outside of valid range"); - // make sure the provided state to revert to is correct - bytes32 calculatedBlockHeaderHash = hashBlockHeader(_blockHeader); - require(calculatedBlockHeaderHash == blocks[_blockIndex], "Calculated block header is different than expected block header"); - // revert back to the state as specified - blocks.length = _blockIndex; - cumulativeNumElements = _blockHeader.cumulativePrevElements; - } - - // dequeues all blocks including and before the given block index - function dequeueBeforeInclusive(uint _blockIndex) public { - //Check that msg.sender is authorized to delete - require(authenticateDequeue(msg.sender), "Message sender does not have permission to dequeue"); - //blockIndex is between first and last blocks - require(_blockIndex >= front && _blockIndex < blocks.length, "Cannot delete blocks outside of valid range"); - //delete all block headers before and including blockIndex - for (uint i = front; i <= _blockIndex; i++) { - delete blocks[i]; - } - //keep track of new head of list - front = _blockIndex + 1; - //TODO Note: keep in mind that front can point to a non-existent block if the list is empty. 
- } -} diff --git a/packages/rollup-contracts/contracts/RollupQueue.sol b/packages/rollup-contracts/contracts/RollupQueue.sol new file mode 100644 index 0000000000000..3ecaab61762b7 --- /dev/null +++ b/packages/rollup-contracts/contracts/RollupQueue.sol @@ -0,0 +1,67 @@ +pragma solidity ^0.5.0; +pragma experimental ABIEncoderV2; + +/* Internal Imports */ +import {DataTypes as dt} from "./DataTypes.sol"; +import {RollupMerkleUtils} from "./RollupMerkleUtils.sol"; + +contract RollupQueue { + // How many elements in total have been appended + uint public cumulativeNumElements; + + // List of block header hashes + bytes32[] public blocks; + + uint256 public front; //Index of the first blockHeaderHash in the list + + // The Rollup Merkle Tree library (currently a contract for ease of testing) + RollupMerkleUtils merkleUtils; + + /*************** + * Constructor * + **************/ + constructor(address _rollupMerkleUtilsAddress) public { + merkleUtils = RollupMerkleUtils(_rollupMerkleUtilsAddress); + front = 0; + } + // for testing: returns length of block list + function getBlocksLength() public view returns (uint) { + return blocks.length; + } + + function authenticateEnqueue(address _sender) public view returns (bool) { return true; } + function authenticateDequeue(address _sender) public view returns (bool) { return true; } + + // appends to the current list of blocks + function enqueueBlock(bytes[] memory _rollupBlock) public { + //Check that msg.sender is authorized to append + require(authenticateEnqueue(msg.sender), "Message sender does not have permission to enqueue"); + require(_rollupBlock.length > 0, "Cannot submit an empty block"); + // calculate block header + bytes32 blockHeaderHash = keccak256(abi.encodePacked( + block.number, // ethBlockNumber + merkleUtils.getMerkleRoot(_rollupBlock), // elementsMerkleRoot + _rollupBlock.length, // numElementsInBlock + cumulativeNumElements // cumulativeNumElements + )); + // store block header + blocks.push(blockHeaderHash); + // update cumulative elements + cumulativeNumElements += _rollupBlock.length; + } + + // dequeues all blocks including and before the given block index + function dequeueBeforeInclusive(uint _blockIndex) public { + //Check that msg.sender is authorized to delete + require(authenticateDequeue(msg.sender), "Message sender does not have permission to dequeue"); + //blockIndex is between first and last blocks + require(_blockIndex >= front && _blockIndex < blocks.length, "Cannot delete blocks outside of valid range"); + //delete all block headers before and including blockIndex + for (uint i = front; i <= _blockIndex; i++) { + delete blocks[i]; + } + //keep track of new head of list + front = _blockIndex + 1; + // Note: keep in mind that front can point to a non-existent block if the list is empty. 
+ } +} diff --git a/packages/rollup-contracts/contracts/RollupTransactionQueue.sol b/packages/rollup-contracts/contracts/RollupTransactionQueue.sol deleted file mode 100644 index 8289519e7ebf9..0000000000000 --- a/packages/rollup-contracts/contracts/RollupTransactionQueue.sol +++ /dev/null @@ -1,27 +0,0 @@ -pragma solidity ^0.5.0; -pragma experimental ABIEncoderV2; - -/* Internal Imports */ -import {RollupList} from "./RollupList.sol"; - -contract RollupTransactionQueue is RollupList { - address public sequencer; - address public canonicalTransactionChain; - - constructor( - address _rollupMerkleUtilsAddress, - address _sequencer, - address _canonicalTransactionChain - ) RollupList(_rollupMerkleUtilsAddress) public { - sequencer = _sequencer; - canonicalTransactionChain = _canonicalTransactionChain; - } - - function authenticateEnqueue(address _sender) public view returns (bool) { - return _sender == sequencer; - } - function authenticateDequeue(address _sender) public view returns (bool) { - return _sender == canonicalTransactionChain; - } - function authenticateDelete(address _sender) public view returns (bool) { return false; } -} diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index 7b8808414d759..dedf4c906bbee 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -15,7 +15,7 @@ import * as CanonicalTransactionChain from '../../build/CanonicalTransactionChai import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' /* Begin tests */ -describe.only('CanonicalTransactionChain', () => { +describe('CanonicalTransactionChain', () => { const provider = createMockProvider() const [wallet, sequencer, canonicalTransactionChain] = getWallets(provider) let canonicalTxChain @@ -33,11 +33,7 @@ describe.only('CanonicalTransactionChain', () => { canonicalTxChain = await deployContract( wallet, CanonicalTransactionChain, - [ - rollupMerkleUtils.address, - sequencer.address, - canonicalTransactionChain.address, - ], + [rollupMerkleUtils.address, sequencer.address], { gasLimit: 6700000, } @@ -70,6 +66,48 @@ describe.only('CanonicalTransactionChain', () => { * Test enqueueBlock() */ describe('appendTransactionBatch() ', async () => { + it('should not throw as long as it gets a bytes array (even if its invalid)', async () => { + const block = ['0x1234', '0x1234'] + const timestamp = 0 + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(block, timestamp) // Did not throw... success! + }) + + it('should throw if submitting an empty block', async () => { + const emptyBlock = [] + const timestamp = 0 + try { + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(emptyBlock, timestamp) + } catch (err) { + // Success we threw an error! 
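+        // Expected path: appendTransactionBatch rejects an empty batch, so
+        // reaching this catch (and the return below) is what makes the test pass.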
+ return + } + throw new Error('Allowed an empty block to be appended') + }) + + it('should add to blocks array', async () => { + const block = ['0x1234', '0x6578'] + const timestamp = 0 + const output = await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(block, timestamp) + log.debug('enqueue block output', JSON.stringify(output)) + const blocksLength = await canonicalTxChain.getBlocksLength() + blocksLength.toNumber().should.equal(1) + }) + + it('should update cumulativeNumElements correctly', async () => { + const block = ['0x1234', '0x5678'] + const timestamp = 0 + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(block, timestamp) + const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() + cumulativeNumElements.toNumber().should.equal(2) + }) it('should allow appendTransactionBatch from sequencer', async () => { const block = ['0x1234', '0x6578'] const timestamp = 0 @@ -86,7 +124,7 @@ describe.only('CanonicalTransactionChain', () => { 'VM Exception while processing transaction: revert Message sender does not have permission to enqueue' ) }) - it.only('should calculate blockHeaderHash correctly', async () => { + it('should calculate blockHeaderHash correctly', async () => { const block = ['0x1234', '0x5678'] const blockIndex = 0 const cumulativePrevElements = 0 @@ -102,5 +140,153 @@ describe.only('CanonicalTransactionChain', () => { const calculatedBlockHeaderHash = await canonicalTxChain.blocks(0) calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) }) + it('should add multiple blocks correctly', async () => { + const block = ['0x1234', '0x5678'] + const numBlocks = 10 + for (let blockIndex = 0; blockIndex < numBlocks; blockIndex++) { + const timestamp = blockIndex + const cumulativePrevElements = block.length * blockIndex + const localBlock = await enqueueAndGenerateBlock( + block, + timestamp, + blockIndex, + cumulativePrevElements + ) + //Check blockHeaderHash + const expectedBlockHeaderHash = await localBlock.hashBlockHeader() + const calculatedBlockHeaderHash = await canonicalTxChain.blocks( + blockIndex + ) + calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) + } + //check cumulativeNumElements + const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() + cumulativeNumElements.toNumber().should.equal(numBlocks * block.length) + //check blocks length + const blocksLength = await canonicalTxChain.getBlocksLength() + blocksLength.toNumber().should.equal(numBlocks) + }) + //TODO test with actual transitions and actual state roots + //TODO test above with multiple blocks with different # elements and different size elements + }) + + /* + * Test verifyElement() + */ + describe('verifyElement() ', async () => { + it('should return true for valid elements for different blockIndexs', async () => { + const maxBlockNumber = 5 + const minBlockNumber = 0 + const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + for ( + let blockIndex = minBlockNumber; + blockIndex < maxBlockNumber + 1; + blockIndex++ + ) { + log.debug(`testing valid proof for block #: ${blockIndex}`) + const timestamp = blockIndex + const cumulativePrevElements = block.length * blockIndex + const localBlock = await enqueueAndGenerateBlock( + block, + timestamp, + blockIndex, + cumulativePrevElements + ) + // Create inclusion proof for the element at elementIndex + const elementIndex = 3 + const element = block[elementIndex] + const position = localBlock.getPosition(elementIndex) + const 
elementInclusionProof = await localBlock.getElementInclusionProof( + elementIndex + ) + log.debug( + `trying to correctly verify this inclusion proof: ${JSON.stringify( + elementInclusionProof + )}` + ) + //run verifyElement() + // + const isIncluded = await canonicalTxChain.verifyElement( + element, + position, + elementInclusionProof + ) + log.debug('isIncluded: ', JSON.stringify(isIncluded)) + isIncluded.should.equal(true) + } + }) + + it('should return false for wrong position with wrong indexInBlock', async () => { + const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + const cumulativePrevElements = 0 + const blockIndex = 0 + const timestamp = 0 + const localBlock = await enqueueAndGenerateBlock( + block, + timestamp, + blockIndex, + cumulativePrevElements + ) + const elementIndex = 1 + const element = block[elementIndex] + const position = localBlock.getPosition(elementIndex) + const elementInclusionProof = await localBlock.getElementInclusionProof( + elementIndex + ) + log.debug( + `trying to falsely verify this inclusion proof: ${JSON.stringify( + elementInclusionProof + )}` + ) + //Give wrong position so inclusion proof is wrong + const wrongPosition = position + 1 + //run verifyElement() + // + const isIncluded = await canonicalTxChain.verifyElement( + element, + wrongPosition, + elementInclusionProof + ) + log.debug('isIncluded: ', JSON.stringify(isIncluded)) + isIncluded.should.equal(false) + }) + + it('should return false for wrong position and matching indexInBlock', async () => { + const block = ['0x1234', '0x4567', '0x890a', '0xabcd'] + const cumulativePrevElements = 0 + const blockIndex = 0 + const timestamp = 0 + const localBlock = await enqueueAndGenerateBlock( + block, + timestamp, + blockIndex, + cumulativePrevElements + ) + //generate inclusion proof + const elementIndex = 1 + const element = block[elementIndex] + const position = localBlock.getPosition(elementIndex) + const elementInclusionProof = await localBlock.getElementInclusionProof( + elementIndex + ) + //Give wrong position so inclusion proof is wrong + const wrongPosition = position + 1 + //Change index to also be false (so position = index + cumulative) + elementInclusionProof.indexInBlock++ + log.debug( + `trying to falsely verify this inclusion proof: ${JSON.stringify( + elementInclusionProof + )}` + ) + //run verifyElement() + // + const isIncluded = await canonicalTxChain.verifyElement( + element, + wrongPosition, + elementInclusionProof + ) + log.debug('isIncluded: ', JSON.stringify(isIncluded)) + isIncluded.should.equal(false) + }) }) }) diff --git a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts index dfb8bbef0c9e2..2eb10d8a25756 100644 --- a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts @@ -15,129 +15,99 @@ import * as L1ToL2TransactionQueue from '../../build/L1ToL2TransactionQueue.json import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' /* Begin tests */ -// describe('L1ToL2TransactionQueue', () => { -// const provider = createMockProvider() -// const [ -// wallet, -// l1ToL2TransactionPasser, -// canonicalTransactionChain, -// ] = getWallets(provider) -// let l1ToL2TxQueue -// let rollupMerkleUtils +describe('L1ToL2TransactionQueue', () => { + const provider = createMockProvider() + const [ + wallet, + l1ToL2TransactionPasser, + 
canonicalTransactionChain, + ] = getWallets(provider) + let l1ToL2TxQueue + let rollupMerkleUtils -// /* Link libraries before tests */ -// before(async () => { -// rollupMerkleUtils = await deployContract(wallet, RollupMerkleUtils, [], { -// gasLimit: 6700000, -// }) -// }) + /* Link libraries before tests */ + before(async () => { + rollupMerkleUtils = await deployContract(wallet, RollupMerkleUtils, [], { + gasLimit: 6700000, + }) + }) -// /* Deploy a new RollupChain before each test */ -// beforeEach(async () => { -// l1ToL2TxQueue = await deployContract( -// wallet, -// L1ToL2TransactionQueue, -// [ -// rollupMerkleUtils.address, -// l1ToL2TransactionPasser.address, -// canonicalTransactionChain.address, -// ], -// { -// gasLimit: 6700000, -// } -// ) -// }) + /* Deploy a new RollupChain before each test */ + beforeEach(async () => { + l1ToL2TxQueue = await deployContract( + wallet, + L1ToL2TransactionQueue, + [ + rollupMerkleUtils.address, + l1ToL2TransactionPasser.address, + canonicalTransactionChain.address, + ], + { + gasLimit: 6700000, + } + ) + }) -// const enqueueAndGenerateBlock = async ( -// block: string[], -// blockIndex: number, -// cumulativePrevElements: number -// ): Promise => { -// // Submit the rollup block on-chain -// const enqueueTx = await l1ToL2TxQueue -// .connect(l1ToL2TransactionPasser) -// .enqueueBlock(block) -// const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) -// // Generate a local version of the rollup block -// const ethBlockNumber = txReceipt.blockNumber -// const localBlock = new DefaultRollupBlock( -// ethBlockNumber, -// blockIndex, -// cumulativePrevElements, -// block -// ) -// await localBlock.generateTree() -// return localBlock -// } + /* + * Test enqueueBlock() + */ + describe('enqueueBlock() ', async () => { + it('should allow enqueue from l1ToL2TransactionPasser', async () => { + const block = ['0x1234'] + await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBlock(block) // Did not throw... success! + }) + it('should not allow enqueue from other address', async () => { + const block = ['0x1234'] + await l1ToL2TxQueue + .enqueueBlock(block) + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Message sender does not have permission to enqueue' + ) + }) + }) + /* + * Test dequeueBlock() + */ + describe('dequeueBlock() ', async () => { + it('should allow dequeue from canonicalTransactionChain', async () => { + const block = ['0x1234'] + const cumulativePrevElements = 0 + const blockIndex = 0 + await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBlock(block) + let blocksLength = await l1ToL2TxQueue.getBlocksLength() + log.debug(`blocksLength before deletion: ${blocksLength}`) + let front = await l1ToL2TxQueue.front() + log.debug(`front before deletion: ${front}`) + let firstBlockHash = await l1ToL2TxQueue.blocks(0) + log.debug(`firstBlockHash before deletion: ${firstBlockHash}`) -// /* -// * Test enqueueBlock() -// */ -// describe('enqueueBlock() ', async () => { -// it('should allow enqueue from l1ToL2TransactionPasser', async () => { -// const block = ['0x1234'] -// await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBlock(block) // Did not throw... success! 
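+      // A stricter assertion (a sketch, using the getter inherited from
+      // RollupQueue) could also check the resulting state, e.g.:
+      //   (await l1ToL2TxQueue.getBlocksLength()).toNumber().should.equal(1)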
-// }) -// it('should not allow enqueue from other address', async () => { -// const block = ['0x1234'] -// await l1ToL2TxQueue -// .enqueueBlock(block) -// .should.be.revertedWith( -// 'VM Exception while processing transaction: revert Message sender does not have permission to enqueue' -// ) -// }) -// }) -// /* -// * Test dequeueBlock() -// */ -// describe('dequeueBlock() ', async () => { -// it('should allow dequeue from canonicalTransactionChain', async () => { -// const block = ['0x1234'] -// const cumulativePrevElements = 0 -// const blockIndex = 0 -// const localBlock = await enqueueAndGenerateBlock( -// block, -// blockIndex, -// cumulativePrevElements -// ) -// let blocksLength = await l1ToL2TxQueue.getBlocksLength() -// log.debug(`blocksLength before deletion: ${blocksLength}`) -// let front = await l1ToL2TxQueue.front() -// log.debug(`front before deletion: ${front}`) -// let firstBlockHash = await l1ToL2TxQueue.blocks(0) -// log.debug(`firstBlockHash before deletion: ${firstBlockHash}`) + // delete the single appended block + await l1ToL2TxQueue + .connect(canonicalTransactionChain) + .dequeueBeforeInclusive(blockIndex) -// // delete the single appended block -// await l1ToL2TxQueue -// .connect(canonicalTransactionChain) -// .dequeueBeforeInclusive(blockIndex) - -// blocksLength = await l1ToL2TxQueue.getBlocksLength() -// log.debug(`blocksLength after deletion: ${blocksLength}`) -// blocksLength.should.equal(1) -// firstBlockHash = await l1ToL2TxQueue.blocks(0) -// log.debug(`firstBlockHash after deletion: ${firstBlockHash}`) -// firstBlockHash.should.equal( -// '0x0000000000000000000000000000000000000000000000000000000000000000' -// ) -// front = await l1ToL2TxQueue.front() -// log.debug(`front after deletion: ${front}`) -// front.should.equal(1) -// }) -// it('should not allow dequeue from other address', async () => { -// const block = ['0x1234'] -// const cumulativePrevElements = 0 -// const blockIndex = 0 -// const localBlock = await enqueueAndGenerateBlock( -// block, -// blockIndex, -// cumulativePrevElements -// ) -// await l1ToL2TxQueue -// .dequeueBeforeInclusive(blockIndex) -// .should.be.revertedWith( -// 'VM Exception while processing transaction: revert Message sender does not have permission to dequeue' -// ) -// }) -// }) -// }) + blocksLength = await l1ToL2TxQueue.getBlocksLength() + log.debug(`blocksLength after deletion: ${blocksLength}`) + blocksLength.should.equal(1) + firstBlockHash = await l1ToL2TxQueue.blocks(0) + log.debug(`firstBlockHash after deletion: ${firstBlockHash}`) + firstBlockHash.should.equal( + '0x0000000000000000000000000000000000000000000000000000000000000000' + ) + front = await l1ToL2TxQueue.front() + log.debug(`front after deletion: ${front}`) + front.should.equal(1) + }) + it('should not allow dequeue from other address', async () => { + const block = ['0x1234'] + const cumulativePrevElements = 0 + const blockIndex = 0 + await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBlock(block) + await l1ToL2TxQueue + .dequeueBeforeInclusive(blockIndex) + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Message sender does not have permission to dequeue' + ) + }) + }) +}) diff --git a/packages/rollup-contracts/test/rollup-list/RollupList.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupList.spec.ts deleted file mode 100644 index 6401fe4d618f0..0000000000000 --- a/packages/rollup-contracts/test/rollup-list/RollupList.spec.ts +++ /dev/null @@ -1,392 +0,0 @@ -import '../setup' - -/* External Imports 
*/ -import { getLogger } from '@eth-optimism/core-utils' -import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' - -/* Internal Imports */ -import { DefaultRollupBlock } from './RLhelper' - -/* Logging */ -const log = getLogger('rollup-list', true) - -/* Contract Imports */ -import * as RollupList from '../../build/RollupList.json' -import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' - -/* Begin tests */ -// describe('RollupList', () => { -// const provider = createMockProvider() -// const [wallet1, wallet2] = getWallets(provider) -// let rollupList -// let rollupMerkleUtils -// let rollupCtLogFilter - -// /* Link libraries before tests */ -// before(async () => { -// rollupMerkleUtils = await deployContract(wallet1, RollupMerkleUtils, [], { -// gasLimit: 6700000, -// }) -// }) - -// /* Deploy a new RollupChain before each test */ -// beforeEach(async () => { -// rollupList = await deployContract( -// wallet1, -// RollupList, -// [rollupMerkleUtils.address], -// { -// gasLimit: 6700000, -// } -// ) -// rollupCtLogFilter = { -// address: rollupList.address, -// fromBlock: 0, -// toBlock: 'latest', -// } -// }) - -// const enqueueAndGenerateBlock = async ( -// block: string[], -// blockIndex: number, -// cumulativePrevElements: number -// ): Promise => { -// // Submit the rollup block on-chain -// const enqueueTx = await rollupList.enqueueBlock(block) -// const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) -// // Generate a local version of the rollup block -// const ethBlockNumber = txReceipt.blockNumber -// const localBlock = new DefaultRollupBlock( -// ethBlockNumber, -// blockIndex, -// cumulativePrevElements, -// block -// ) -// await localBlock.generateTree() -// return localBlock -// } -// /* -// * Test enqueueBlock() -// */ -// describe('enqueueBlock() ', async () => { -// it('should not throw as long as it gets a bytes array (even if its invalid)', async () => { -// const block = ['0x1234', '0x1234'] -// await rollupList.enqueueBlock(block) // Did not throw... success! -// }) - -// it('should throw if submitting an empty block', async () => { -// const emptyBlock = [] -// try { -// await rollupList.enqueueBlock(emptyBlock) -// } catch (err) { -// // Success we threw an error! 
-// return -// } -// throw new Error('Allowed an empty block to be appended') -// }) - -// it('should add to blocks array', async () => { -// const block = ['0x1234', '0x6578'] -// const output = await rollupList.enqueueBlock(block) -// log.debug('enqueue block output', JSON.stringify(output)) -// const blocksLength = await rollupList.getBlocksLength() -// blocksLength.toNumber().should.equal(1) -// }) - -// it('should update cumulativeNumElements correctly', async () => { -// const block = ['0x1234', '0x5678'] -// await rollupList.enqueueBlock(block) -// const cumulativeNumElements = await rollupList.cumulativeNumElements.call() -// cumulativeNumElements.toNumber().should.equal(2) -// }) - -// it('should calculate blockHeaderHash correctly', async () => { -// const block = ['0x1234', '0x5678'] -// const blockIndex = 0 -// const cumulativePrevElements = 0 -// const localBlock = await enqueueAndGenerateBlock( -// block, -// blockIndex, -// cumulativePrevElements -// ) -// //Check blockHeaderHash -// const expectedBlockHeaderHash = await localBlock.hashBlockHeader() -// const calculatedBlockHeaderHash = await rollupList.blocks(0) -// calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) -// }) - -// it('should add multiple blocks correctly', async () => { -// const block = ['0x1234', '0x5678'] -// const numBlocks = 10 -// for (let blockIndex = 0; blockIndex < numBlocks; blockIndex++) { -// const cumulativePrevElements = block.length * blockIndex -// const localBlock = await enqueueAndGenerateBlock( -// block, -// blockIndex, -// cumulativePrevElements -// ) -// //Check blockHeaderHash -// const expectedBlockHeaderHash = await localBlock.hashBlockHeader() -// const calculatedBlockHeaderHash = await rollupList.blocks(blockIndex) -// calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) -// } -// //check cumulativeNumElements -// const cumulativeNumElements = await rollupList.cumulativeNumElements.call() -// cumulativeNumElements.toNumber().should.equal(numBlocks * block.length) -// //check blocks length -// const blocksLength = await rollupList.getBlocksLength() -// blocksLength.toNumber().should.equal(numBlocks) -// }) -// //TODO test with actual transitions and actual state roots -// //TODO test above with multiple blocks with different # elements and different size elements -// }) - -// /* -// * Test verifyElement() -// */ -// describe('verifyElement() ', async () => { -// it('should return true for valid elements for different blockIndexs', async () => { -// const maxBlockNumber = 5 -// const minBlockNumber = 0 -// const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] -// // Create trees of multiple sizes tree -// for ( -// let blockIndex = minBlockNumber; -// blockIndex < maxBlockNumber + 1; -// blockIndex++ -// ) { -// log.debug(`testing valid proof for block #: ${blockIndex}`) -// const cumulativePrevElements = block.length * blockIndex -// const localBlock = await enqueueAndGenerateBlock( -// block, -// blockIndex, -// cumulativePrevElements -// ) -// // Create inclusion proof for the element at elementIndex -// const elementIndex = 3 -// const element = block[elementIndex] -// const position = localBlock.getPosition(elementIndex) -// const elementInclusionProof = await localBlock.getElementInclusionProof( -// elementIndex -// ) -// log.debug( -// `trying to correctly verify this inclusion proof: ${JSON.stringify( -// elementInclusionProof -// )}` -// ) -// //run verifyElement() -// // -// const isIncluded = await rollupList.verifyElement( -// 
element, -// position, -// elementInclusionProof -// ) -// log.debug('isIncluded: ', JSON.stringify(isIncluded)) -// isIncluded.should.equal(true) -// } -// }) - -// it('should return false for wrong position with wrong indexInBlock', async () => { -// const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] -// const cumulativePrevElements = 0 -// const blockIndex = 0 -// const localBlock = await enqueueAndGenerateBlock( -// block, -// blockIndex, -// cumulativePrevElements -// ) -// const elementIndex = 1 -// const element = block[elementIndex] -// const position = localBlock.getPosition(elementIndex) -// const elementInclusionProof = await localBlock.getElementInclusionProof( -// elementIndex -// ) -// log.debug( -// `trying to falsely verify this inclusion proof: ${JSON.stringify( -// elementInclusionProof -// )}` -// ) -// //Give wrong position so inclusion proof is wrong -// const wrongPosition = position + 1 -// //run verifyElement() -// // -// const isIncluded = await rollupList.verifyElement( -// element, -// wrongPosition, -// elementInclusionProof -// ) -// log.debug('isIncluded: ', JSON.stringify(isIncluded)) -// isIncluded.should.equal(false) -// }) - -// it('should return false for wrong position and matching indexInBlock', async () => { -// const block = ['0x1234', '0x4567', '0x890a', '0xabcd'] -// const cumulativePrevElements = 0 -// const blockIndex = 0 -// const localBlock = await enqueueAndGenerateBlock( -// block, -// blockIndex, -// cumulativePrevElements -// ) -// //generate inclusion proof -// const elementIndex = 1 -// const element = block[elementIndex] -// const position = localBlock.getPosition(elementIndex) -// const elementInclusionProof = await localBlock.getElementInclusionProof( -// elementIndex -// ) -// //Give wrong position so inclusion proof is wrong -// const wrongPosition = position + 1 -// //Change index to also be false (so position = index + cumulative) -// elementInclusionProof.indexInBlock++ -// log.debug( -// `trying to falsely verify this inclusion proof: ${JSON.stringify( -// elementInclusionProof -// )}` -// ) -// //run verifyElement() -// // -// const isIncluded = await rollupList.verifyElement( -// element, -// wrongPosition, -// elementInclusionProof -// ) -// log.debug('isIncluded: ', JSON.stringify(isIncluded)) -// isIncluded.should.equal(false) -// }) -// }) - -// /* -// * Test deleteAfterInclusive() -// */ -// describe('deleteAfterInclusive() ', async () => { -// it('should delete single block', async () => { -// const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] -// const cumulativePrevElements = 0 -// const blockIndex = 0 -// const localBlock = await enqueueAndGenerateBlock( -// block, -// blockIndex, -// cumulativePrevElements -// ) -// const blockHeader = { -// ethBlockNumber: localBlock.ethBlockNumber, -// elementsMerkleRoot: await localBlock.elementsMerkleTree.getRootHash(), -// numElementsInBlock: block.length, -// cumulativePrevElements, -// } -// // Submit the rollup block on-chain -// let blocksLength = await rollupList.getBlocksLength() -// log.debug(`blocksLength before deletion: ${blocksLength}`) -// await rollupList.deleteAfterInclusive( -// blockIndex, // delete the single appended block -// blockHeader -// ) -// blocksLength = await rollupList.getBlocksLength() -// log.debug(`blocksLength after deletion: ${blocksLength}`) -// blocksLength.should.equal(0) -// }) - -// it('should delete many blocks', async () => { -// const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', 
'0xabcd'] -// const localBlocks = [] -// for (let blockIndex = 0; blockIndex < 5; blockIndex++) { -// const cumulativePrevElements = blockIndex * block.length -// const localBlock = await enqueueAndGenerateBlock( -// block, -// blockIndex, -// cumulativePrevElements -// ) -// localBlocks.push(localBlock) -// } -// const deleteBlockNumber = 0 -// const deleteBlock = localBlocks[deleteBlockNumber] -// const blockHeader = { -// ethBlockNumber: deleteBlock.ethBlockNumber, -// elementsMerkleRoot: deleteBlock.elementsMerkleTree.getRootHash(), -// numElementsInBlock: block.length, -// cumulativePrevElements: deleteBlock.cumulativePrevElements, -// } -// let blocksLength = await rollupList.getBlocksLength() -// log.debug(`blocksLength before deletion: ${blocksLength}`) -// await rollupList.deleteAfterInclusive( -// deleteBlockNumber, // delete all blocks (including and after block 0) -// blockHeader -// ) -// blocksLength = await rollupList.getBlocksLength() -// log.debug(`blocksLength after deletion: ${blocksLength}`) -// blocksLength.should.equal(0) -// }) -// }) - -// describe('dequeueBeforeInclusive()', async () => { -// it('should dequeue single block', async () => { -// const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] -// const cumulativePrevElements = 0 -// const blockIndex = 0 -// const localBlock = await enqueueAndGenerateBlock( -// block, -// blockIndex, -// cumulativePrevElements -// ) -// let blocksLength = await rollupList.getBlocksLength() -// log.debug(`blocksLength before deletion: ${blocksLength}`) -// let front = await rollupList.front() -// log.debug(`front before deletion: ${front}`) -// let firstBlockHash = await rollupList.blocks(0) -// log.debug(`firstBlockHash before deletion: ${firstBlockHash}`) - -// // delete the single appended block -// await rollupList.dequeueBeforeInclusive(blockIndex) - -// blocksLength = await rollupList.getBlocksLength() -// log.debug(`blocksLength after deletion: ${blocksLength}`) -// blocksLength.should.equal(1) -// firstBlockHash = await rollupList.blocks(0) -// log.debug(`firstBlockHash after deletion: ${firstBlockHash}`) -// firstBlockHash.should.equal( -// '0x0000000000000000000000000000000000000000000000000000000000000000' -// ) -// front = await rollupList.front() -// log.debug(`front after deletion: ${front}`) -// front.should.equal(1) -// }) - -// it('should dequeue many blocks', async () => { -// const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] -// const localBlocks = [] -// const numBlocks = 5 -// for (let blockIndex = 0; blockIndex < numBlocks; blockIndex++) { -// const cumulativePrevElements = block.length * blockIndex -// const localBlock = await enqueueAndGenerateBlock( -// block, -// blockIndex, -// cumulativePrevElements -// ) -// localBlocks.push(localBlock) -// } -// let blocksLength = await rollupList.getBlocksLength() -// log.debug(`blocksLength before deletion: ${blocksLength}`) -// let front = await rollupList.front() -// log.debug(`front before deletion: ${front}`) -// for (let i = 0; i < numBlocks; i++) { -// const ithBlockHash = await rollupList.blocks(i) -// log.debug(`blockHash #${i} before deletion: ${ithBlockHash}`) -// } -// await rollupList.dequeueBeforeInclusive(numBlocks - 1) -// blocksLength = await rollupList.getBlocksLength() -// log.debug(`blocksLength after deletion: ${blocksLength}`) -// blocksLength.should.equal(numBlocks) -// front = await rollupList.front() -// log.debug(`front after deletion: ${front}`) -// front.should.equal(numBlocks) -// for (let i 
= 0; i < numBlocks; i++) { -// const ithBlockHash = await rollupList.blocks(i) -// log.debug(`blockHash #${i} after deletion: ${ithBlockHash}`) -// ithBlockHash.should.equal( -// '0x0000000000000000000000000000000000000000000000000000000000000000' -// ) -// } -// }) -// }) -// }) diff --git a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts new file mode 100644 index 0000000000000..da677008c2722 --- /dev/null +++ b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts @@ -0,0 +1,392 @@ +import '../setup' + +/* External Imports */ +import { getLogger } from '@eth-optimism/core-utils' +import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' + +/* Internal Imports */ +import { DefaultRollupBlock } from './RLhelper' + +/* Logging */ +const log = getLogger('rollup-queue', true) + +/* Contract Imports */ +import * as RollupQueue from '../../build/RollupQueue.json' +import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' + +/* Begin tests */ +describe('RollupQueue', () => { + const provider = createMockProvider() + const [wallet1, wallet2] = getWallets(provider) + let rollupQueue + let rollupMerkleUtils + let rollupCtLogFilter + + /* Link libraries before tests */ + before(async () => { + rollupMerkleUtils = await deployContract(wallet1, RollupMerkleUtils, [], { + gasLimit: 6700000, + }) + }) + + /* Deploy a new RollupChain before each test */ + beforeEach(async () => { + rollupQueue = await deployContract( + wallet1, + RollupQueue, + [rollupMerkleUtils.address], + { + gasLimit: 6700000, + } + ) + rollupCtLogFilter = { + address: rollupQueue.address, + fromBlock: 0, + toBlock: 'latest', + } + }) + + // const enqueueAndGenerateBlock = async ( + // block: string[], + // blockIndex: number, + // cumulativePrevElements: number + // ): Promise => { + // // Submit the rollup block on-chain + // const enqueueTx = await rollupQueue.enqueueBlock(block) + // const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) + // // Generate a local version of the rollup block + // const ethBlockNumber = txReceipt.blockNumber + // const localBlock = new DefaultRollupBlock( + // ethBlockNumber, + // blockIndex, + // cumulativePrevElements, + // block + // ) + // await localBlock.generateTree() + // return localBlock + // } + /* + * Test enqueueBlock() + */ + describe('enqueueBlock() ', async () => { + it('should not throw as long as it gets a bytes array (even if its invalid)', async () => { + const block = ['0x1234', '0x1234'] + await rollupQueue.enqueueBlock(block) // Did not throw... success! + }) + + it('should throw if submitting an empty block', async () => { + const emptyBlock = [] + try { + await rollupQueue.enqueueBlock(emptyBlock) + } catch (err) { + // Success we threw an error! 
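+        // Expected path: RollupQueue.enqueueBlock reverts with
+        // "Cannot submit an empty block", so reaching this catch is a pass.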
+ return + } + throw new Error('Allowed an empty block to be appended') + }) + + it('should add to blocks array', async () => { + const block = ['0x1234', '0x6578'] + const output = await rollupQueue.enqueueBlock(block) + log.debug('enqueue block output', JSON.stringify(output)) + const blocksLength = await rollupQueue.getBlocksLength() + blocksLength.toNumber().should.equal(1) + }) + + it('should update cumulativeNumElements correctly', async () => { + const block = ['0x1234', '0x5678'] + await rollupQueue.enqueueBlock(block) + const cumulativeNumElements = await rollupQueue.cumulativeNumElements.call() + cumulativeNumElements.toNumber().should.equal(2) + }) + + // it('should calculate blockHeaderHash correctly', async () => { + // const block = ['0x1234', '0x5678'] + // const blockIndex = 0 + // const cumulativePrevElements = 0 + // const localBlock = await enqueueAndGenerateBlock( + // block, + // blockIndex, + // cumulativePrevElements + // ) + // //Check blockHeaderHash + // const expectedBlockHeaderHash = await localBlock.hashBlockHeader() + // const calculatedBlockHeaderHash = await rollupQueue.blocks(0) + // calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) + // }) + + // it('should add multiple blocks correctly', async () => { + // const block = ['0x1234', '0x5678'] + // const numBlocks = 10 + // for (let blockIndex = 0; blockIndex < numBlocks; blockIndex++) { + // const cumulativePrevElements = block.length * blockIndex + // const localBlock = await enqueueAndGenerateBlock( + // block, + // blockIndex, + // cumulativePrevElements + // ) + // //Check blockHeaderHash + // const expectedBlockHeaderHash = await localBlock.hashBlockHeader() + // const calculatedBlockHeaderHash = await rollupQueue.blocks(blockIndex) + // calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) + // } + // //check cumulativeNumElements + // const cumulativeNumElements = await rollupQueue.cumulativeNumElements.call() + // cumulativeNumElements.toNumber().should.equal(numBlocks * block.length) + // //check blocks length + // const blocksLength = await rollupQueue.getBlocksLength() + // blocksLength.toNumber().should.equal(numBlocks) + // }) + //TODO test with actual transitions and actual state roots + //TODO test above with multiple blocks with different # elements and different size elements + }) + + // /* + // * Test verifyElement() + // */ + // describe('verifyElement() ', async () => { + // it('should return true for valid elements for different blockIndexs', async () => { + // const maxBlockNumber = 5 + // const minBlockNumber = 0 + // const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + // // Create trees of multiple sizes tree + // for ( + // let blockIndex = minBlockNumber; + // blockIndex < maxBlockNumber + 1; + // blockIndex++ + // ) { + // log.debug(`testing valid proof for block #: ${blockIndex}`) + // const cumulativePrevElements = block.length * blockIndex + // const localBlock = await enqueueAndGenerateBlock( + // block, + // blockIndex, + // cumulativePrevElements + // ) + // // Create inclusion proof for the element at elementIndex + // const elementIndex = 3 + // const element = block[elementIndex] + // const position = localBlock.getPosition(elementIndex) + // const elementInclusionProof = await localBlock.getElementInclusionProof( + // elementIndex + // ) + // log.debug( + // `trying to correctly verify this inclusion proof: ${JSON.stringify( + // elementInclusionProof + // )}` + // ) + // //run verifyElement() + // // + // const isIncluded 
= await rollupQueue.verifyElement( + // element, + // position, + // elementInclusionProof + // ) + // log.debug('isIncluded: ', JSON.stringify(isIncluded)) + // isIncluded.should.equal(true) + // } + // }) + + // it('should return false for wrong position with wrong indexInBlock', async () => { + // const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + // const cumulativePrevElements = 0 + // const blockIndex = 0 + // const localBlock = await enqueueAndGenerateBlock( + // block, + // blockIndex, + // cumulativePrevElements + // ) + // const elementIndex = 1 + // const element = block[elementIndex] + // const position = localBlock.getPosition(elementIndex) + // const elementInclusionProof = await localBlock.getElementInclusionProof( + // elementIndex + // ) + // log.debug( + // `trying to falsely verify this inclusion proof: ${JSON.stringify( + // elementInclusionProof + // )}` + // ) + // //Give wrong position so inclusion proof is wrong + // const wrongPosition = position + 1 + // //run verifyElement() + // // + // const isIncluded = await rollupQueue.verifyElement( + // element, + // wrongPosition, + // elementInclusionProof + // ) + // log.debug('isIncluded: ', JSON.stringify(isIncluded)) + // isIncluded.should.equal(false) + // }) + + // it('should return false for wrong position and matching indexInBlock', async () => { + // const block = ['0x1234', '0x4567', '0x890a', '0xabcd'] + // const cumulativePrevElements = 0 + // const blockIndex = 0 + // const localBlock = await enqueueAndGenerateBlock( + // block, + // blockIndex, + // cumulativePrevElements + // ) + // //generate inclusion proof + // const elementIndex = 1 + // const element = block[elementIndex] + // const position = localBlock.getPosition(elementIndex) + // const elementInclusionProof = await localBlock.getElementInclusionProof( + // elementIndex + // ) + // //Give wrong position so inclusion proof is wrong + // const wrongPosition = position + 1 + // //Change index to also be false (so position = index + cumulative) + // elementInclusionProof.indexInBlock++ + // log.debug( + // `trying to falsely verify this inclusion proof: ${JSON.stringify( + // elementInclusionProof + // )}` + // ) + // //run verifyElement() + // // + // const isIncluded = await rollupQueue.verifyElement( + // element, + // wrongPosition, + // elementInclusionProof + // ) + // log.debug('isIncluded: ', JSON.stringify(isIncluded)) + // isIncluded.should.equal(false) + // }) + // }) + + // /* + // * Test deleteAfterInclusive() + // */ + // describe('deleteAfterInclusive() ', async () => { + // it('should delete single block', async () => { + // const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + // const cumulativePrevElements = 0 + // const blockIndex = 0 + // const localBlock = await enqueueAndGenerateBlock( + // block, + // blockIndex, + // cumulativePrevElements + // ) + // const blockHeader = { + // ethBlockNumber: localBlock.ethBlockNumber, + // elementsMerkleRoot: await localBlock.elementsMerkleTree.getRootHash(), + // numElementsInBlock: block.length, + // cumulativePrevElements, + // } + // // Submit the rollup block on-chain + // let blocksLength = await rollupQueue.getBlocksLength() + // log.debug(`blocksLength before deletion: ${blocksLength}`) + // await rollupQueue.deleteAfterInclusive( + // blockIndex, // delete the single appended block + // blockHeader + // ) + // blocksLength = await rollupQueue.getBlocksLength() + // log.debug(`blocksLength after deletion: ${blocksLength}`) + // 
blocksLength.should.equal(0) + // }) + + // it('should delete many blocks', async () => { + // const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + // const localBlocks = [] + // for (let blockIndex = 0; blockIndex < 5; blockIndex++) { + // const cumulativePrevElements = blockIndex * block.length + // const localBlock = await enqueueAndGenerateBlock( + // block, + // blockIndex, + // cumulativePrevElements + // ) + // localBlocks.push(localBlock) + // } + // const deleteBlockNumber = 0 + // const deleteBlock = localBlocks[deleteBlockNumber] + // const blockHeader = { + // ethBlockNumber: deleteBlock.ethBlockNumber, + // elementsMerkleRoot: deleteBlock.elementsMerkleTree.getRootHash(), + // numElementsInBlock: block.length, + // cumulativePrevElements: deleteBlock.cumulativePrevElements, + // } + // let blocksLength = await rollupQueue.getBlocksLength() + // log.debug(`blocksLength before deletion: ${blocksLength}`) + // await rollupQueue.deleteAfterInclusive( + // deleteBlockNumber, // delete all blocks (including and after block 0) + // blockHeader + // ) + // blocksLength = await rollupQueue.getBlocksLength() + // log.debug(`blocksLength after deletion: ${blocksLength}`) + // blocksLength.should.equal(0) + // }) + // }) + + // describe('dequeueBeforeInclusive()', async () => { + // it('should dequeue single block', async () => { + // const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + // const cumulativePrevElements = 0 + // const blockIndex = 0 + // const localBlock = await enqueueAndGenerateBlock( + // block, + // blockIndex, + // cumulativePrevElements + // ) + // let blocksLength = await rollupQueue.getBlocksLength() + // log.debug(`blocksLength before deletion: ${blocksLength}`) + // let front = await rollupQueue.front() + // log.debug(`front before deletion: ${front}`) + // let firstBlockHash = await rollupQueue.blocks(0) + // log.debug(`firstBlockHash before deletion: ${firstBlockHash}`) + + // // delete the single appended block + // await rollupQueue.dequeueBeforeInclusive(blockIndex) + + // blocksLength = await rollupQueue.getBlocksLength() + // log.debug(`blocksLength after deletion: ${blocksLength}`) + // blocksLength.should.equal(1) + // firstBlockHash = await rollupQueue.blocks(0) + // log.debug(`firstBlockHash after deletion: ${firstBlockHash}`) + // firstBlockHash.should.equal( + // '0x0000000000000000000000000000000000000000000000000000000000000000' + // ) + // front = await rollupQueue.front() + // log.debug(`front after deletion: ${front}`) + // front.should.equal(1) + // }) + + // it('should dequeue many blocks', async () => { + // const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + // const localBlocks = [] + // const numBlocks = 5 + // for (let blockIndex = 0; blockIndex < numBlocks; blockIndex++) { + // const cumulativePrevElements = block.length * blockIndex + // const localBlock = await enqueueAndGenerateBlock( + // block, + // blockIndex, + // cumulativePrevElements + // ) + // localBlocks.push(localBlock) + // } + // let blocksLength = await rollupQueue.getBlocksLength() + // log.debug(`blocksLength before deletion: ${blocksLength}`) + // let front = await rollupQueue.front() + // log.debug(`front before deletion: ${front}`) + // for (let i = 0; i < numBlocks; i++) { + // const ithBlockHash = await rollupQueue.blocks(i) + // log.debug(`blockHash #${i} before deletion: ${ithBlockHash}`) + // } + // await rollupQueue.dequeueBeforeInclusive(numBlocks - 1) + // blocksLength = await 
rollupQueue.getBlocksLength() + // log.debug(`blocksLength after deletion: ${blocksLength}`) + // blocksLength.should.equal(numBlocks) + // front = await rollupQueue.front() + // log.debug(`front after deletion: ${front}`) + // front.should.equal(numBlocks) + // for (let i = 0; i < numBlocks; i++) { + // const ithBlockHash = await rollupQueue.blocks(i) + // log.debug(`blockHash #${i} after deletion: ${ithBlockHash}`) + // ithBlockHash.should.equal( + // '0x0000000000000000000000000000000000000000000000000000000000000000' + // ) + // } + // }) + // }) +}) diff --git a/packages/rollup-contracts/test/rollup-list/RollupTransactionQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupTransactionQueue.spec.ts deleted file mode 100644 index f0b8e15b48642..0000000000000 --- a/packages/rollup-contracts/test/rollup-list/RollupTransactionQueue.spec.ts +++ /dev/null @@ -1,137 +0,0 @@ -import '../setup' - -/* External Imports */ -import { getLogger } from '@eth-optimism/core-utils' -import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' - -/* Internal Imports */ -import { DefaultRollupBlock } from './RLhelper' - -/* Logging */ -const log = getLogger('rollup-tx-queue', true) - -/* Contract Imports */ -import * as RollupTransactionQueue from '../../build/RollupTransactionQueue.json' -import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' - -/* Begin tests */ -// describe('RollupTransactionQueue', () => { -// const provider = createMockProvider() -// const [wallet, sequencer, canonicalTransactionChain] = getWallets(provider) -// let rollupTxQueue -// let rollupMerkleUtils - -// /* Link libraries before tests */ -// before(async () => { -// rollupMerkleUtils = await deployContract(wallet, RollupMerkleUtils, [], { -// gasLimit: 6700000, -// }) -// }) - -// /* Deploy a new RollupChain before each test */ -// beforeEach(async () => { -// rollupTxQueue = await deployContract( -// wallet, -// RollupTransactionQueue, -// [ -// rollupMerkleUtils.address, -// sequencer.address, -// canonicalTransactionChain.address, -// ], -// { -// gasLimit: 6700000, -// } -// ) -// }) - -// const enqueueAndGenerateBlock = async ( -// block: string[], -// blockIndex: number, -// cumulativePrevElements: number -// ): Promise => { -// // Submit the rollup block on-chain -// const enqueueTx = await rollupTxQueue.connect(sequencer).enqueueBlock(block) -// const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) -// // Generate a local version of the rollup block -// const ethBlockNumber = txReceipt.blockNumber -// const localBlock = new DefaultRollupBlock( -// ethBlockNumber, -// blockIndex, -// cumulativePrevElements, -// block -// ) -// await localBlock.generateTree() -// return localBlock -// } - -// /* -// * Test enqueueBlock() -// */ -// describe('enqueueBlock() ', async () => { -// it('should allow enqueue from sequencer', async () => { -// const block = ['0x1234'] -// await rollupTxQueue.connect(sequencer).enqueueBlock(block) // Did not throw... success! 
-// }) -// it('should not allow enqueue from other address', async () => { -// const block = ['0x1234'] -// await rollupTxQueue -// .enqueueBlock(block) -// .should.be.revertedWith( -// 'VM Exception while processing transaction: revert Message sender does not have permission to enqueue' -// ) -// }) -// }) -// /* -// * Test dequeueBlock() -// */ -// describe('dequeueBlock() ', async () => { -// it('should allow dequeue from canonicalTransactionChain', async () => { -// const block = ['0x1234'] -// const cumulativePrevElements = 0 -// const blockIndex = 0 -// const localBlock = await enqueueAndGenerateBlock( -// block, -// blockIndex, -// cumulativePrevElements -// ) -// let blocksLength = await rollupTxQueue.getBlocksLength() -// log.debug(`blocksLength before deletion: ${blocksLength}`) -// let front = await rollupTxQueue.front() -// log.debug(`front before deletion: ${front}`) -// let firstBlockHash = await rollupTxQueue.blocks(0) -// log.debug(`firstBlockHash before deletion: ${firstBlockHash}`) - -// // delete the single appended block -// await rollupTxQueue -// .connect(canonicalTransactionChain) -// .dequeueBeforeInclusive(blockIndex) - -// blocksLength = await rollupTxQueue.getBlocksLength() -// log.debug(`blocksLength after deletion: ${blocksLength}`) -// blocksLength.should.equal(1) -// firstBlockHash = await rollupTxQueue.blocks(0) -// log.debug(`firstBlockHash after deletion: ${firstBlockHash}`) -// firstBlockHash.should.equal( -// '0x0000000000000000000000000000000000000000000000000000000000000000' -// ) -// front = await rollupTxQueue.front() -// log.debug(`front after deletion: ${front}`) -// front.should.equal(1) -// }) -// it('should not allow dequeue from other address', async () => { -// const block = ['0x1234'] -// const cumulativePrevElements = 0 -// const blockIndex = 0 -// const localBlock = await enqueueAndGenerateBlock( -// block, -// blockIndex, -// cumulativePrevElements -// ) -// await rollupTxQueue -// .dequeueBeforeInclusive(blockIndex) -// .should.be.revertedWith( -// 'VM Exception while processing transaction: revert Message sender does not have permission to dequeue' -// ) -// }) -// }) -// }) From 01d29e23031513711055311ff48749d436692fd0 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Mon, 18 May 2020 13:58:09 -0400 Subject: [PATCH 06/37] RollupQueue tests --- .../contracts/RollupQueue.sol | 12 +- .../test/rollup-list/RLhelper.ts | 39 ++ .../test/rollup-list/RollupQueue.spec.ts | 406 ++++-------------- 3 files changed, 140 insertions(+), 317 deletions(-) diff --git a/packages/rollup-contracts/contracts/RollupQueue.sol b/packages/rollup-contracts/contracts/RollupQueue.sol index 3ecaab61762b7..81057f3de6207 100644 --- a/packages/rollup-contracts/contracts/RollupQueue.sol +++ b/packages/rollup-contracts/contracts/RollupQueue.sol @@ -38,12 +38,12 @@ contract RollupQueue { require(authenticateEnqueue(msg.sender), "Message sender does not have permission to enqueue"); require(_rollupBlock.length > 0, "Cannot submit an empty block"); // calculate block header - bytes32 blockHeaderHash = keccak256(abi.encodePacked( - block.number, // ethBlockNumber - merkleUtils.getMerkleRoot(_rollupBlock), // elementsMerkleRoot - _rollupBlock.length, // numElementsInBlock - cumulativeNumElements // cumulativeNumElements - )); + bytes32 blockHeaderHash = keccak256( + abi.encodePacked( + merkleUtils.getMerkleRoot(_rollupBlock), // elementsMerkleRoot + _rollupBlock.length // numElementsInBlock + ) + ); // store block header blocks.push(blockHeaderHash); // update cumulative elements 
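With this change, enqueueBlock() commits only to the batch's elements merkle root and element count, so the stored header hash can be recomputed off-chain and compared against rollupQueue.blocks(i) in the tests. A minimal TypeScript sketch of that recomputation, assuming the ethers v4 utils module already used by the test helpers (the standalone helper name localBlockHeaderHash is hypothetical; the RollupQueueBatch.hashBlockHeader() added to RLhelper.ts below performs the equivalent computation):

    import { utils } from 'ethers'

    // Mirrors the on-chain hash introduced above:
    //   keccak256(abi.encodePacked(elementsMerkleRoot, numElementsInBlock))
    // solidityKeccak256 tightly packs its arguments just like abi.encodePacked.
    function localBlockHeaderHash(
      elementsMerkleRoot: string, // bytes32 hex string, e.g. the local sparse merkle tree root
      numElementsInBlock: number
    ): string {
      return utils.solidityKeccak256(
        ['bytes32', 'uint'],
        [elementsMerkleRoot, numElementsInBlock]
      )
    }

    // Usage in a test: localBlockHeaderHash(rootHex, block.length)
    // should equal the value returned by rollupQueue.blocks(blockIndex).
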
diff --git a/packages/rollup-contracts/test/rollup-list/RLhelper.ts b/packages/rollup-contracts/test/rollup-list/RLhelper.ts index ba5c034f7022d..024999516aae0 100644 --- a/packages/rollup-contracts/test/rollup-list/RLhelper.ts +++ b/packages/rollup-contracts/test/rollup-list/RLhelper.ts @@ -130,3 +130,42 @@ export class DefaultRollupBlock { } } } +/* + * Helper class which provides all information requried for a particular + * Rollup block. This includes all of the tranisitions in readable form + * as well as the merkle tree which it generates. + */ +export class RollupQueueBatch { + public elements: string[] //Rollup block + public elementsMerkleTree: SparseMerkleTreeImpl + + constructor(elements: string[]) { + this.elements = elements + } + /* + * Generate the elements merkle tree from this.elements + */ + public async generateTree(): Promise { + // Create a tree! + const treeHeight = Math.ceil(Math.log2(this.elements.length)) + 1 // The height should actually not be plus 1 + this.elementsMerkleTree = await SparseMerkleTreeImpl.create( + newInMemoryDB(), + undefined, + treeHeight + ) + for (let i = 0; i < this.elements.length; i++) { + await this.elementsMerkleTree.update( + new BigNumber(i, 10), + hexStrToBuf(this.elements[i]) + ) + } + } + + public async hashBlockHeader(): Promise { + const bufferRoot = await this.elementsMerkleTree.getRootHash() + return utils.solidityKeccak256( + ['bytes32', 'uint'], + [bufToHexString(bufferRoot), this.elements.length] + ) + } +} diff --git a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts index da677008c2722..2d4a78243839a 100644 --- a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts @@ -5,7 +5,7 @@ import { getLogger } from '@eth-optimism/core-utils' import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' /* Internal Imports */ -import { DefaultRollupBlock } from './RLhelper' +import { RollupQueueBatch } from './RLhelper' /* Logging */ const log = getLogger('rollup-queue', true) @@ -20,7 +20,6 @@ describe('RollupQueue', () => { const [wallet1, wallet2] = getWallets(provider) let rollupQueue let rollupMerkleUtils - let rollupCtLogFilter /* Link libraries before tests */ before(async () => { @@ -39,32 +38,18 @@ describe('RollupQueue', () => { gasLimit: 6700000, } ) - rollupCtLogFilter = { - address: rollupQueue.address, - fromBlock: 0, - toBlock: 'latest', - } }) - // const enqueueAndGenerateBlock = async ( - // block: string[], - // blockIndex: number, - // cumulativePrevElements: number - // ): Promise => { - // // Submit the rollup block on-chain - // const enqueueTx = await rollupQueue.enqueueBlock(block) - // const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) - // // Generate a local version of the rollup block - // const ethBlockNumber = txReceipt.blockNumber - // const localBlock = new DefaultRollupBlock( - // ethBlockNumber, - // blockIndex, - // cumulativePrevElements, - // block - // ) - // await localBlock.generateTree() - // return localBlock - // } + const enqueueAndGenerateBlock = async ( + block: string[] + ): Promise => { + // Submit the rollup block on-chain + await rollupQueue.enqueueBlock(block) + // Generate a local version of the rollup block + const localBlock = new RollupQueueBatch(block) + await localBlock.generateTree() + return localBlock + } /* * Test enqueueBlock() */ @@ -100,293 +85,92 @@ describe('RollupQueue', () 
=> { cumulativeNumElements.toNumber().should.equal(2) }) - // it('should calculate blockHeaderHash correctly', async () => { - // const block = ['0x1234', '0x5678'] - // const blockIndex = 0 - // const cumulativePrevElements = 0 - // const localBlock = await enqueueAndGenerateBlock( - // block, - // blockIndex, - // cumulativePrevElements - // ) - // //Check blockHeaderHash - // const expectedBlockHeaderHash = await localBlock.hashBlockHeader() - // const calculatedBlockHeaderHash = await rollupQueue.blocks(0) - // calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) - // }) + it('should calculate blockHeaderHash correctly', async () => { + const block = ['0x1234', '0x5678'] + const localBlock = await enqueueAndGenerateBlock(block) + //Check blockHeaderHash + const expectedBlockHeaderHash = await localBlock.hashBlockHeader() + const calculatedBlockHeaderHash = await rollupQueue.blocks(0) + calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) + }) - // it('should add multiple blocks correctly', async () => { - // const block = ['0x1234', '0x5678'] - // const numBlocks = 10 - // for (let blockIndex = 0; blockIndex < numBlocks; blockIndex++) { - // const cumulativePrevElements = block.length * blockIndex - // const localBlock = await enqueueAndGenerateBlock( - // block, - // blockIndex, - // cumulativePrevElements - // ) - // //Check blockHeaderHash - // const expectedBlockHeaderHash = await localBlock.hashBlockHeader() - // const calculatedBlockHeaderHash = await rollupQueue.blocks(blockIndex) - // calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) - // } - // //check cumulativeNumElements - // const cumulativeNumElements = await rollupQueue.cumulativeNumElements.call() - // cumulativeNumElements.toNumber().should.equal(numBlocks * block.length) - // //check blocks length - // const blocksLength = await rollupQueue.getBlocksLength() - // blocksLength.toNumber().should.equal(numBlocks) - // }) - //TODO test with actual transitions and actual state roots - //TODO test above with multiple blocks with different # elements and different size elements + it('should add multiple blocks correctly', async () => { + const block = ['0x1234', '0x5678'] + const numBlocks = 10 + for (let blockIndex = 0; blockIndex < numBlocks; blockIndex++) { + const cumulativePrevElements = block.length * blockIndex + const localBlock = await enqueueAndGenerateBlock(block) + //Check blockHeaderHash + const expectedBlockHeaderHash = await localBlock.hashBlockHeader() + const calculatedBlockHeaderHash = await rollupQueue.blocks(blockIndex) + calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) + } + //check blocks length + const blocksLength = await rollupQueue.getBlocksLength() + blocksLength.toNumber().should.equal(numBlocks) + }) }) - // /* - // * Test verifyElement() - // */ - // describe('verifyElement() ', async () => { - // it('should return true for valid elements for different blockIndexs', async () => { - // const maxBlockNumber = 5 - // const minBlockNumber = 0 - // const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - // // Create trees of multiple sizes tree - // for ( - // let blockIndex = minBlockNumber; - // blockIndex < maxBlockNumber + 1; - // blockIndex++ - // ) { - // log.debug(`testing valid proof for block #: ${blockIndex}`) - // const cumulativePrevElements = block.length * blockIndex - // const localBlock = await enqueueAndGenerateBlock( - // block, - // blockIndex, - // cumulativePrevElements - // ) - // // Create inclusion proof 
for the element at elementIndex - // const elementIndex = 3 - // const element = block[elementIndex] - // const position = localBlock.getPosition(elementIndex) - // const elementInclusionProof = await localBlock.getElementInclusionProof( - // elementIndex - // ) - // log.debug( - // `trying to correctly verify this inclusion proof: ${JSON.stringify( - // elementInclusionProof - // )}` - // ) - // //run verifyElement() - // // - // const isIncluded = await rollupQueue.verifyElement( - // element, - // position, - // elementInclusionProof - // ) - // log.debug('isIncluded: ', JSON.stringify(isIncluded)) - // isIncluded.should.equal(true) - // } - // }) - - // it('should return false for wrong position with wrong indexInBlock', async () => { - // const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - // const cumulativePrevElements = 0 - // const blockIndex = 0 - // const localBlock = await enqueueAndGenerateBlock( - // block, - // blockIndex, - // cumulativePrevElements - // ) - // const elementIndex = 1 - // const element = block[elementIndex] - // const position = localBlock.getPosition(elementIndex) - // const elementInclusionProof = await localBlock.getElementInclusionProof( - // elementIndex - // ) - // log.debug( - // `trying to falsely verify this inclusion proof: ${JSON.stringify( - // elementInclusionProof - // )}` - // ) - // //Give wrong position so inclusion proof is wrong - // const wrongPosition = position + 1 - // //run verifyElement() - // // - // const isIncluded = await rollupQueue.verifyElement( - // element, - // wrongPosition, - // elementInclusionProof - // ) - // log.debug('isIncluded: ', JSON.stringify(isIncluded)) - // isIncluded.should.equal(false) - // }) - - // it('should return false for wrong position and matching indexInBlock', async () => { - // const block = ['0x1234', '0x4567', '0x890a', '0xabcd'] - // const cumulativePrevElements = 0 - // const blockIndex = 0 - // const localBlock = await enqueueAndGenerateBlock( - // block, - // blockIndex, - // cumulativePrevElements - // ) - // //generate inclusion proof - // const elementIndex = 1 - // const element = block[elementIndex] - // const position = localBlock.getPosition(elementIndex) - // const elementInclusionProof = await localBlock.getElementInclusionProof( - // elementIndex - // ) - // //Give wrong position so inclusion proof is wrong - // const wrongPosition = position + 1 - // //Change index to also be false (so position = index + cumulative) - // elementInclusionProof.indexInBlock++ - // log.debug( - // `trying to falsely verify this inclusion proof: ${JSON.stringify( - // elementInclusionProof - // )}` - // ) - // //run verifyElement() - // // - // const isIncluded = await rollupQueue.verifyElement( - // element, - // wrongPosition, - // elementInclusionProof - // ) - // log.debug('isIncluded: ', JSON.stringify(isIncluded)) - // isIncluded.should.equal(false) - // }) - // }) - - // /* - // * Test deleteAfterInclusive() - // */ - // describe('deleteAfterInclusive() ', async () => { - // it('should delete single block', async () => { - // const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - // const cumulativePrevElements = 0 - // const blockIndex = 0 - // const localBlock = await enqueueAndGenerateBlock( - // block, - // blockIndex, - // cumulativePrevElements - // ) - // const blockHeader = { - // ethBlockNumber: localBlock.ethBlockNumber, - // elementsMerkleRoot: await localBlock.elementsMerkleTree.getRootHash(), - // numElementsInBlock: block.length, - 
// cumulativePrevElements, - // } - // // Submit the rollup block on-chain - // let blocksLength = await rollupQueue.getBlocksLength() - // log.debug(`blocksLength before deletion: ${blocksLength}`) - // await rollupQueue.deleteAfterInclusive( - // blockIndex, // delete the single appended block - // blockHeader - // ) - // blocksLength = await rollupQueue.getBlocksLength() - // log.debug(`blocksLength after deletion: ${blocksLength}`) - // blocksLength.should.equal(0) - // }) - - // it('should delete many blocks', async () => { - // const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - // const localBlocks = [] - // for (let blockIndex = 0; blockIndex < 5; blockIndex++) { - // const cumulativePrevElements = blockIndex * block.length - // const localBlock = await enqueueAndGenerateBlock( - // block, - // blockIndex, - // cumulativePrevElements - // ) - // localBlocks.push(localBlock) - // } - // const deleteBlockNumber = 0 - // const deleteBlock = localBlocks[deleteBlockNumber] - // const blockHeader = { - // ethBlockNumber: deleteBlock.ethBlockNumber, - // elementsMerkleRoot: deleteBlock.elementsMerkleTree.getRootHash(), - // numElementsInBlock: block.length, - // cumulativePrevElements: deleteBlock.cumulativePrevElements, - // } - // let blocksLength = await rollupQueue.getBlocksLength() - // log.debug(`blocksLength before deletion: ${blocksLength}`) - // await rollupQueue.deleteAfterInclusive( - // deleteBlockNumber, // delete all blocks (including and after block 0) - // blockHeader - // ) - // blocksLength = await rollupQueue.getBlocksLength() - // log.debug(`blocksLength after deletion: ${blocksLength}`) - // blocksLength.should.equal(0) - // }) - // }) - - // describe('dequeueBeforeInclusive()', async () => { - // it('should dequeue single block', async () => { - // const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - // const cumulativePrevElements = 0 - // const blockIndex = 0 - // const localBlock = await enqueueAndGenerateBlock( - // block, - // blockIndex, - // cumulativePrevElements - // ) - // let blocksLength = await rollupQueue.getBlocksLength() - // log.debug(`blocksLength before deletion: ${blocksLength}`) - // let front = await rollupQueue.front() - // log.debug(`front before deletion: ${front}`) - // let firstBlockHash = await rollupQueue.blocks(0) - // log.debug(`firstBlockHash before deletion: ${firstBlockHash}`) - - // // delete the single appended block - // await rollupQueue.dequeueBeforeInclusive(blockIndex) - - // blocksLength = await rollupQueue.getBlocksLength() - // log.debug(`blocksLength after deletion: ${blocksLength}`) - // blocksLength.should.equal(1) - // firstBlockHash = await rollupQueue.blocks(0) - // log.debug(`firstBlockHash after deletion: ${firstBlockHash}`) - // firstBlockHash.should.equal( - // '0x0000000000000000000000000000000000000000000000000000000000000000' - // ) - // front = await rollupQueue.front() - // log.debug(`front after deletion: ${front}`) - // front.should.equal(1) - // }) + describe('dequeueBeforeInclusive()', async () => { + it('should dequeue single block', async () => { + const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + const cumulativePrevElements = 0 + const blockIndex = 0 + const localBlock = await enqueueAndGenerateBlock(block) + let blocksLength = await rollupQueue.getBlocksLength() + log.debug(`blocksLength before deletion: ${blocksLength}`) + let front = await rollupQueue.front() + log.debug(`front before deletion: ${front}`) + let 
firstBlockHash = await rollupQueue.blocks(0) + log.debug(`firstBlockHash before deletion: ${firstBlockHash}`) + + // delete the single appended block + await rollupQueue.dequeueBeforeInclusive(blockIndex) + + blocksLength = await rollupQueue.getBlocksLength() + log.debug(`blocksLength after deletion: ${blocksLength}`) + blocksLength.should.equal(1) + firstBlockHash = await rollupQueue.blocks(0) + log.debug(`firstBlockHash after deletion: ${firstBlockHash}`) + firstBlockHash.should.equal( + '0x0000000000000000000000000000000000000000000000000000000000000000' + ) + front = await rollupQueue.front() + log.debug(`front after deletion: ${front}`) + front.should.equal(1) + }) - // it('should dequeue many blocks', async () => { - // const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - // const localBlocks = [] - // const numBlocks = 5 - // for (let blockIndex = 0; blockIndex < numBlocks; blockIndex++) { - // const cumulativePrevElements = block.length * blockIndex - // const localBlock = await enqueueAndGenerateBlock( - // block, - // blockIndex, - // cumulativePrevElements - // ) - // localBlocks.push(localBlock) - // } - // let blocksLength = await rollupQueue.getBlocksLength() - // log.debug(`blocksLength before deletion: ${blocksLength}`) - // let front = await rollupQueue.front() - // log.debug(`front before deletion: ${front}`) - // for (let i = 0; i < numBlocks; i++) { - // const ithBlockHash = await rollupQueue.blocks(i) - // log.debug(`blockHash #${i} before deletion: ${ithBlockHash}`) - // } - // await rollupQueue.dequeueBeforeInclusive(numBlocks - 1) - // blocksLength = await rollupQueue.getBlocksLength() - // log.debug(`blocksLength after deletion: ${blocksLength}`) - // blocksLength.should.equal(numBlocks) - // front = await rollupQueue.front() - // log.debug(`front after deletion: ${front}`) - // front.should.equal(numBlocks) - // for (let i = 0; i < numBlocks; i++) { - // const ithBlockHash = await rollupQueue.blocks(i) - // log.debug(`blockHash #${i} after deletion: ${ithBlockHash}`) - // ithBlockHash.should.equal( - // '0x0000000000000000000000000000000000000000000000000000000000000000' - // ) - // } - // }) - // }) + it('should dequeue many blocks', async () => { + const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + const localBlocks = [] + const numBlocks = 5 + for (let blockIndex = 0; blockIndex < numBlocks; blockIndex++) { + const cumulativePrevElements = block.length * blockIndex + const localBlock = await enqueueAndGenerateBlock(block) + localBlocks.push(localBlock) + } + let blocksLength = await rollupQueue.getBlocksLength() + log.debug(`blocksLength before deletion: ${blocksLength}`) + let front = await rollupQueue.front() + log.debug(`front before deletion: ${front}`) + for (let i = 0; i < numBlocks; i++) { + const ithBlockHash = await rollupQueue.blocks(i) + log.debug(`blockHash #${i} before deletion: ${ithBlockHash}`) + } + await rollupQueue.dequeueBeforeInclusive(numBlocks - 1) + blocksLength = await rollupQueue.getBlocksLength() + log.debug(`blocksLength after deletion: ${blocksLength}`) + blocksLength.should.equal(numBlocks) + front = await rollupQueue.front() + log.debug(`front after deletion: ${front}`) + front.should.equal(numBlocks) + for (let i = 0; i < numBlocks; i++) { + const ithBlockHash = await rollupQueue.blocks(i) + log.debug(`blockHash #${i} after deletion: ${ithBlockHash}`) + ithBlockHash.should.equal( + '0x0000000000000000000000000000000000000000000000000000000000000000' + ) + } + }) + }) }) From 
2e1b9ffb2ba3fba61a7a0930a42b3547848d9243 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Mon, 18 May 2020 14:38:57 -0400 Subject: [PATCH 07/37] rename block -> batch --- .../contracts/CanonicalTransactionChain.sol | 66 +++--- .../rollup-contracts/contracts/DataTypes.sol | 10 +- .../contracts/RollupQueue.sol | 52 +++-- .../CanonicalTransactionChain.spec.ts | 192 +++++++++--------- .../L1ToL2TransactionQueue.spec.ts | 56 ++--- .../test/rollup-list/RLhelper.ts | 52 ++--- .../test/rollup-list/RollupQueue.spec.ts | 168 +++++++-------- 7 files changed, 296 insertions(+), 300 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index d8e13c0b3efdb..4ffad3962762f 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -12,8 +12,8 @@ contract CanonicalTransactionChain { // How many elements in total have been appended uint public cumulativeNumElements; - // List of block header hashes - bytes32[] public blocks; + // List of batch header hashes + bytes32[] public batches; constructor( @@ -24,50 +24,48 @@ contract CanonicalTransactionChain { sequencer = _sequencer; } - // for testing: returns length of block list - function getBlocksLength() public view returns (uint) { - return blocks.length; + // for testing: returns length of batch list + function getBatchsLength() public view returns (uint) { + return batches.length; } - function hashBlockHeader( - dt.BlockHeader memory _blockHeader + function hashBatchHeader( + dt.BatchHeader memory _batchHeader ) public pure returns (bytes32) { return keccak256(abi.encodePacked( - _blockHeader.timestamp, - _blockHeader.isL1ToL2Tx, - _blockHeader.elementsMerkleRoot, - _blockHeader.numElementsInBlock, - _blockHeader.cumulativePrevElements + _batchHeader.timestamp, + _batchHeader.isL1ToL2Tx, + _batchHeader.elementsMerkleRoot, + _batchHeader.numElementsInBatch, + _batchHeader.cumulativePrevElements )); } - function authenticateEnqueue(address _sender) public view returns (bool) { + function authenticateAppend(address _sender) public view returns (bool) { return _sender == sequencer; } - function authenticateDequeue(address _sender) public view returns (bool) { return false; } - function authenticateDelete(address _sender) public view returns (bool) { return false; } - // appends to the current list of blocks + // appends to the current list of batches function appendTransactionBatch(bytes[] memory _txBatch, uint _timestamp) public { //Check that msg.sender is authorized to append - require(authenticateEnqueue(msg.sender), "Message sender does not have permission to enqueue"); - require(_txBatch.length > 0, "Cannot submit an empty block"); + require(authenticateAppend(msg.sender), "Message sender does not have permission to enqueue"); + require(_txBatch.length > 0, "Cannot submit an empty batch"); // require(_timestamp > lastOVMTimestamp, "timestamps must monotonically increase"); // lastOVMTimestamp = _timestamp; - // require dist(_timestamp, block.timestamp) < sequencerLivenessAssumption - // require(L1ToL2Queue.ageOfOldestQueuedBlock() < sequencerLivenessAssumption, "must process all L1->L2 blocks older than liveness assumption before processing L2 blocks.") + // require dist(_timestamp, batch.timestamp) < sequencerLivenessAssumption + // require(L1ToL2Queue.ageOfOldestQueuedBatch() < sequencerLivenessAssumption, "must process all L1->L2 batches older than 
liveness assumption before processing L2 batches.") - // calculate block header - bytes32 blockHeaderHash = keccak256(abi.encodePacked( + // calculate batch header + bytes32 batchHeaderHash = keccak256(abi.encodePacked( _timestamp, false, // isL1ToL2Tx merkleUtils.getMerkleRoot(_txBatch), // elementsMerkleRoot - _txBatch.length, // numElementsInBlock + _txBatch.length, // numElementsInBatch cumulativeNumElements // cumulativeNumElements )); - // store block header - blocks.push(blockHeaderHash); + // store batch header + batches.push(batchHeaderHash); // update cumulative elements cumulativeNumElements += _txBatch.length; } @@ -76,23 +74,23 @@ contract CanonicalTransactionChain { function verifyElement( bytes memory _element, // the element of the list being proven uint _position, // the position in the list of the element being proven - dt.ElementInclusionProof memory _inclusionProof // inclusion proof in the rollup block + dt.ElementInclusionProof memory _inclusionProof // inclusion proof in the rollup batch ) public view returns (bool) { - // For convenience, store the blockHeader - dt.BlockHeader memory blockHeader = _inclusionProof.blockHeader; + // For convenience, store the batchHeader + dt.BatchHeader memory batchHeader = _inclusionProof.batchHeader; // make sure absolute position equivalent to relative positions - if(_position != _inclusionProof.indexInBlock + - blockHeader.cumulativePrevElements) + if(_position != _inclusionProof.indexInBatch + + batchHeader.cumulativePrevElements) return false; // verify elementsMerkleRoot if (!merkleUtils.verify( - blockHeader.elementsMerkleRoot, + batchHeader.elementsMerkleRoot, _element, - _inclusionProof.indexInBlock, + _inclusionProof.indexInBatch, _inclusionProof.siblings )) return false; - //compare computed block header with the block header in the list. - return hashBlockHeader(blockHeader) == blocks[_inclusionProof.blockIndex]; + //compare computed batch header with the batch header in the list. 
+ return hashBatchHeader(batchHeader) == batches[_inclusionProof.batchIndex]; } } diff --git a/packages/rollup-contracts/contracts/DataTypes.sol b/packages/rollup-contracts/contracts/DataTypes.sol index 3a5d12916604f..835ed72256ae0 100644 --- a/packages/rollup-contracts/contracts/DataTypes.sol +++ b/packages/rollup-contracts/contracts/DataTypes.sol @@ -12,17 +12,17 @@ contract DataTypes { } struct ElementInclusionProof { - uint blockIndex; // index in blocks array (first block has blockNumber of 0) - BlockHeader blockHeader; - uint indexInBlock; // used to verify inclusion of the element in elementsMerkleRoot + uint batchIndex; // index in batches array (first batch has batchNumber of 0) + BatchHeader batchHeader; + uint indexInBatch; // used to verify inclusion of the element in elementsMerkleRoot bytes32[] siblings; // used to verify inclusion of the element in elementsMerkleRoot } - struct BlockHeader { + struct BatchHeader { uint timestamp; bool isL1ToL2Tx; bytes32 elementsMerkleRoot; - uint numElementsInBlock; + uint numElementsInBatch; uint cumulativePrevElements; } } diff --git a/packages/rollup-contracts/contracts/RollupQueue.sol b/packages/rollup-contracts/contracts/RollupQueue.sol index 81057f3de6207..2addadd400994 100644 --- a/packages/rollup-contracts/contracts/RollupQueue.sol +++ b/packages/rollup-contracts/contracts/RollupQueue.sol @@ -8,11 +8,9 @@ import {RollupMerkleUtils} from "./RollupMerkleUtils.sol"; contract RollupQueue { // How many elements in total have been appended uint public cumulativeNumElements; - - // List of block header hashes - bytes32[] public blocks; - - uint256 public front; //Index of the first blockHeaderHash in the list + // List of batch header hashes + bytes32[] public batches; + uint256 public front; //Index of the first batchHeaderHash in the list // The Rollup Merkle Tree library (currently a contract for ease of testing) RollupMerkleUtils merkleUtils; @@ -24,44 +22,44 @@ contract RollupQueue { merkleUtils = RollupMerkleUtils(_rollupMerkleUtilsAddress); front = 0; } - // for testing: returns length of block list - function getBlocksLength() public view returns (uint) { - return blocks.length; + // for testing: returns length of batch list + function getBatchesLength() public view returns (uint) { + return batches.length; } function authenticateEnqueue(address _sender) public view returns (bool) { return true; } function authenticateDequeue(address _sender) public view returns (bool) { return true; } - // appends to the current list of blocks - function enqueueBlock(bytes[] memory _rollupBlock) public { + // appends to the current list of batches + function enqueueBatch(bytes[] memory _rollupBatch) public { //Check that msg.sender is authorized to append require(authenticateEnqueue(msg.sender), "Message sender does not have permission to enqueue"); - require(_rollupBlock.length > 0, "Cannot submit an empty block"); - // calculate block header - bytes32 blockHeaderHash = keccak256( + require(_rollupBatch.length > 0, "Cannot submit an empty batch"); + // calculate batch header + bytes32 batchHeaderHash = keccak256( abi.encodePacked( - merkleUtils.getMerkleRoot(_rollupBlock), // elementsMerkleRoot - _rollupBlock.length // numElementsInBlock + merkleUtils.getMerkleRoot(_rollupBatch), // elementsMerkleRoot + _rollupBatch.length // numElementsInBatch ) ); - // store block header - blocks.push(blockHeaderHash); + // store batch header + batches.push(batchHeaderHash); // update cumulative elements - cumulativeNumElements += _rollupBlock.length; + 
cumulativeNumElements += _rollupBatch.length; } - // dequeues all blocks including and before the given block index - function dequeueBeforeInclusive(uint _blockIndex) public { + // dequeues all batches including and before the given batch index + function dequeueBeforeInclusive(uint _batchIndex) public { //Check that msg.sender is authorized to delete require(authenticateDequeue(msg.sender), "Message sender does not have permission to dequeue"); - //blockIndex is between first and last blocks - require(_blockIndex >= front && _blockIndex < blocks.length, "Cannot delete blocks outside of valid range"); - //delete all block headers before and including blockIndex - for (uint i = front; i <= _blockIndex; i++) { - delete blocks[i]; + //batchIndex is between first and last batches + require(_batchIndex >= front && _batchIndex < batches.length, "Cannot delete batches outside of valid range"); + //delete all batch headers before and including batchIndex + for (uint i = front; i <= _batchIndex; i++) { + delete batches[i]; } //keep track of new head of list - front = _blockIndex + 1; - // Note: keep in mind that front can point to a non-existent block if the list is empty. + front = _batchIndex + 1; + // Note: keep in mind that front can point to a non-existent batch if the list is empty. } } diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index dedf4c906bbee..10ba26bfad255 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -5,7 +5,7 @@ import { getLogger } from '@eth-optimism/core-utils' import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' /* Internal Imports */ -import { DefaultRollupBlock } from './RLhelper' +import { DefaultRollupBatch } from './RLhelper' /* Logging */ const log = getLogger('rollup-tx-queue', true) @@ -40,163 +40,163 @@ describe('CanonicalTransactionChain', () => { ) }) - const enqueueAndGenerateBlock = async ( - block: string[], + const enqueueAndGenerateBatch = async ( + batch: string[], timestamp: number, - blockIndex: number, + batchIndex: number, cumulativePrevElements: number - ): Promise => { - // Submit the rollup block on-chain + ): Promise => { + // Submit the rollup batch on-chain await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(block, timestamp) - // Generate a local version of the rollup block - const localBlock = new DefaultRollupBlock( + .appendTransactionBatch(batch, timestamp) + // Generate a local version of the rollup batch + const localBatch = new DefaultRollupBatch( timestamp, false, - blockIndex, + batchIndex, cumulativePrevElements, - block + batch ) - await localBlock.generateTree() - return localBlock + await localBatch.generateTree() + return localBatch } /* - * Test enqueueBlock() + * Test enqueueBatch() */ describe('appendTransactionBatch() ', async () => { it('should not throw as long as it gets a bytes array (even if its invalid)', async () => { - const block = ['0x1234', '0x1234'] + const batch = ['0x1234', '0x1234'] const timestamp = 0 await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(block, timestamp) // Did not throw... success! + .appendTransactionBatch(batch, timestamp) // Did not throw... success! 
}) - it('should throw if submitting an empty block', async () => { - const emptyBlock = [] + it('should throw if submitting an empty batch', async () => { + const emptyBatch = [] const timestamp = 0 try { await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(emptyBlock, timestamp) + .appendTransactionBatch(emptyBatch, timestamp) } catch (err) { // Success we threw an error! return } - throw new Error('Allowed an empty block to be appended') + throw new Error('Allowed an empty batch to be appended') }) - it('should add to blocks array', async () => { - const block = ['0x1234', '0x6578'] + it('should add to batches array', async () => { + const batch = ['0x1234', '0x6578'] const timestamp = 0 const output = await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(block, timestamp) - log.debug('enqueue block output', JSON.stringify(output)) - const blocksLength = await canonicalTxChain.getBlocksLength() - blocksLength.toNumber().should.equal(1) + .appendTransactionBatch(batch, timestamp) + log.debug('enqueue batch output', JSON.stringify(output)) + const batchesLength = await canonicalTxChain.getBatchsLength() + batchesLength.toNumber().should.equal(1) }) it('should update cumulativeNumElements correctly', async () => { - const block = ['0x1234', '0x5678'] + const batch = ['0x1234', '0x5678'] const timestamp = 0 await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(block, timestamp) + .appendTransactionBatch(batch, timestamp) const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() cumulativeNumElements.toNumber().should.equal(2) }) it('should allow appendTransactionBatch from sequencer', async () => { - const block = ['0x1234', '0x6578'] + const batch = ['0x1234', '0x6578'] const timestamp = 0 await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(block, timestamp) // Did not throw... success! + .appendTransactionBatch(batch, timestamp) // Did not throw... success! 
}) it('should not allow appendTransactionBatch from other address', async () => { - const block = ['0x1234', '0x6578'] + const batch = ['0x1234', '0x6578'] const timestamp = 0 await canonicalTxChain - .appendTransactionBatch(block, timestamp) + .appendTransactionBatch(batch, timestamp) .should.be.revertedWith( 'VM Exception while processing transaction: revert Message sender does not have permission to enqueue' ) }) - it('should calculate blockHeaderHash correctly', async () => { - const block = ['0x1234', '0x5678'] - const blockIndex = 0 + it('should calculate batchHeaderHash correctly', async () => { + const batch = ['0x1234', '0x5678'] + const batchIndex = 0 const cumulativePrevElements = 0 const timestamp = 0 - const localBlock = await enqueueAndGenerateBlock( - block, + const localBatch = await enqueueAndGenerateBatch( + batch, timestamp, - blockIndex, + batchIndex, cumulativePrevElements ) - //Check blockHeaderHash - const expectedBlockHeaderHash = await localBlock.hashBlockHeader() - const calculatedBlockHeaderHash = await canonicalTxChain.blocks(0) - calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) + //Check batchHeaderHash + const expectedBatchHeaderHash = await localBatch.hashBatchHeader() + const calculatedBatchHeaderHash = await canonicalTxChain.batches(0) + calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) }) - it('should add multiple blocks correctly', async () => { - const block = ['0x1234', '0x5678'] - const numBlocks = 10 - for (let blockIndex = 0; blockIndex < numBlocks; blockIndex++) { - const timestamp = blockIndex - const cumulativePrevElements = block.length * blockIndex - const localBlock = await enqueueAndGenerateBlock( - block, + it('should add multiple batches correctly', async () => { + const batch = ['0x1234', '0x5678'] + const numBatchs = 10 + for (let batchIndex = 0; batchIndex < numBatchs; batchIndex++) { + const timestamp = batchIndex + const cumulativePrevElements = batch.length * batchIndex + const localBatch = await enqueueAndGenerateBatch( + batch, timestamp, - blockIndex, + batchIndex, cumulativePrevElements ) - //Check blockHeaderHash - const expectedBlockHeaderHash = await localBlock.hashBlockHeader() - const calculatedBlockHeaderHash = await canonicalTxChain.blocks( - blockIndex + //Check batchHeaderHash + const expectedBatchHeaderHash = await localBatch.hashBatchHeader() + const calculatedBatchHeaderHash = await canonicalTxChain.batches( + batchIndex ) - calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) + calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) } //check cumulativeNumElements const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() - cumulativeNumElements.toNumber().should.equal(numBlocks * block.length) - //check blocks length - const blocksLength = await canonicalTxChain.getBlocksLength() - blocksLength.toNumber().should.equal(numBlocks) + cumulativeNumElements.toNumber().should.equal(numBatchs * batch.length) + //check batches length + const batchesLength = await canonicalTxChain.getBatchsLength() + batchesLength.toNumber().should.equal(numBatchs) }) //TODO test with actual transitions and actual state roots - //TODO test above with multiple blocks with different # elements and different size elements + //TODO test above with multiple batches with different # elements and different size elements }) /* * Test verifyElement() */ describe('verifyElement() ', async () => { - it('should return true for valid elements for different blockIndexs', async () => { - 
const maxBlockNumber = 5 - const minBlockNumber = 0 - const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + it('should return true for valid elements for different batchIndexes', async () => { + const maxBatchNumber = 5 + const minBatchNumber = 0 + const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] for ( - let blockIndex = minBlockNumber; - blockIndex < maxBlockNumber + 1; - blockIndex++ + let batchIndex = minBatchNumber; + batchIndex < maxBatchNumber + 1; + batchIndex++ ) { - log.debug(`testing valid proof for block #: ${blockIndex}`) - const timestamp = blockIndex - const cumulativePrevElements = block.length * blockIndex - const localBlock = await enqueueAndGenerateBlock( - block, + log.debug(`testing valid proof for batch #: ${batchIndex}`) + const timestamp = batchIndex + const cumulativePrevElements = batch.length * batchIndex + const localBatch = await enqueueAndGenerateBatch( + batch, timestamp, - blockIndex, + batchIndex, cumulativePrevElements ) // Create inclusion proof for the element at elementIndex const elementIndex = 3 - const element = block[elementIndex] - const position = localBlock.getPosition(elementIndex) - const elementInclusionProof = await localBlock.getElementInclusionProof( + const element = batch[elementIndex] + const position = localBatch.getPosition(elementIndex) + const elementInclusionProof = await localBatch.getElementInclusionProof( elementIndex ) log.debug( @@ -216,21 +216,21 @@ describe('CanonicalTransactionChain', () => { } }) - it('should return false for wrong position with wrong indexInBlock', async () => { - const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + it('should return false for wrong position with wrong indexInBatch', async () => { + const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] const cumulativePrevElements = 0 - const blockIndex = 0 + const batchIndex = 0 const timestamp = 0 - const localBlock = await enqueueAndGenerateBlock( - block, + const localBatch = await enqueueAndGenerateBatch( + batch, timestamp, - blockIndex, + batchIndex, cumulativePrevElements ) const elementIndex = 1 - const element = block[elementIndex] - const position = localBlock.getPosition(elementIndex) - const elementInclusionProof = await localBlock.getElementInclusionProof( + const element = batch[elementIndex] + const position = localBatch.getPosition(elementIndex) + const elementInclusionProof = await localBatch.getElementInclusionProof( elementIndex ) log.debug( @@ -251,28 +251,28 @@ describe('CanonicalTransactionChain', () => { isIncluded.should.equal(false) }) - it('should return false for wrong position and matching indexInBlock', async () => { - const block = ['0x1234', '0x4567', '0x890a', '0xabcd'] + it('should return false for wrong position and matching indexInBatch', async () => { + const batch = ['0x1234', '0x4567', '0x890a', '0xabcd'] const cumulativePrevElements = 0 - const blockIndex = 0 + const batchIndex = 0 const timestamp = 0 - const localBlock = await enqueueAndGenerateBlock( - block, + const localBatch = await enqueueAndGenerateBatch( + batch, timestamp, - blockIndex, + batchIndex, cumulativePrevElements ) //generate inclusion proof const elementIndex = 1 - const element = block[elementIndex] - const position = localBlock.getPosition(elementIndex) - const elementInclusionProof = await localBlock.getElementInclusionProof( + const element = batch[elementIndex] + const position = localBatch.getPosition(elementIndex) + const elementInclusionProof = 
await localBatch.getElementInclusionProof( elementIndex ) //Give wrong position so inclusion proof is wrong const wrongPosition = position + 1 //Change index to also be false (so position = index + cumulative) - elementInclusionProof.indexInBlock++ + elementInclusionProof.indexInBatch++ log.debug( `trying to falsely verify this inclusion proof: ${JSON.stringify( elementInclusionProof diff --git a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts index 2eb10d8a25756..41b392e9daf70 100644 --- a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts @@ -5,7 +5,7 @@ import { getLogger } from '@eth-optimism/core-utils' import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' /* Internal Imports */ -import { DefaultRollupBlock } from './RLhelper' +import { DefaultRollupBatch } from './RLhelper' /* Logging */ const log = getLogger('l1-to-l2-tx-queue', true) @@ -49,49 +49,49 @@ describe('L1ToL2TransactionQueue', () => { }) /* - * Test enqueueBlock() + * Test enqueueBatch() */ - describe('enqueueBlock() ', async () => { + describe('enqueueBatch() ', async () => { it('should allow enqueue from l1ToL2TransactionPasser', async () => { - const block = ['0x1234'] - await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBlock(block) // Did not throw... success! + const batch = ['0x1234'] + await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBatch(batch) // Did not throw... success! }) it('should not allow enqueue from other address', async () => { - const block = ['0x1234'] + const batch = ['0x1234'] await l1ToL2TxQueue - .enqueueBlock(block) + .enqueueBatch(batch) .should.be.revertedWith( 'VM Exception while processing transaction: revert Message sender does not have permission to enqueue' ) }) }) /* - * Test dequeueBlock() + * Test dequeueBatch() */ - describe('dequeueBlock() ', async () => { + describe('dequeueBatch() ', async () => { it('should allow dequeue from canonicalTransactionChain', async () => { - const block = ['0x1234'] + const batch = ['0x1234'] const cumulativePrevElements = 0 - const blockIndex = 0 - await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBlock(block) - let blocksLength = await l1ToL2TxQueue.getBlocksLength() - log.debug(`blocksLength before deletion: ${blocksLength}`) + const batchIndex = 0 + await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBatch(batch) + let batchesLength = await l1ToL2TxQueue.getBatchesLength() + log.debug(`batchesLength before deletion: ${batchesLength}`) let front = await l1ToL2TxQueue.front() log.debug(`front before deletion: ${front}`) - let firstBlockHash = await l1ToL2TxQueue.blocks(0) - log.debug(`firstBlockHash before deletion: ${firstBlockHash}`) + let firstBatchHash = await l1ToL2TxQueue.batches(0) + log.debug(`firstBatchHash before deletion: ${firstBatchHash}`) - // delete the single appended block + // delete the single appended batch await l1ToL2TxQueue .connect(canonicalTransactionChain) - .dequeueBeforeInclusive(blockIndex) + .dequeueBeforeInclusive(batchIndex) - blocksLength = await l1ToL2TxQueue.getBlocksLength() - log.debug(`blocksLength after deletion: ${blocksLength}`) - blocksLength.should.equal(1) - firstBlockHash = await l1ToL2TxQueue.blocks(0) - log.debug(`firstBlockHash after deletion: ${firstBlockHash}`) - firstBlockHash.should.equal( + batchesLength = await 
l1ToL2TxQueue.getBatchesLength() + log.debug(`batchesLength after deletion: ${batchesLength}`) + batchesLength.should.equal(1) + firstBatchHash = await l1ToL2TxQueue.batches(0) + log.debug(`firstBatchHash after deletion: ${firstBatchHash}`) + firstBatchHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' ) front = await l1ToL2TxQueue.front() @@ -99,12 +99,12 @@ describe('L1ToL2TransactionQueue', () => { front.should.equal(1) }) it('should not allow dequeue from other address', async () => { - const block = ['0x1234'] + const batch = ['0x1234'] const cumulativePrevElements = 0 - const blockIndex = 0 - await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBlock(block) + const batchIndex = 0 + await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBatch(batch) await l1ToL2TxQueue - .dequeueBeforeInclusive(blockIndex) + .dequeueBeforeInclusive(batchIndex) .should.be.revertedWith( 'VM Exception while processing transaction: revert Message sender does not have permission to dequeue' ) diff --git a/packages/rollup-contracts/test/rollup-list/RLhelper.ts b/packages/rollup-contracts/test/rollup-list/RLhelper.ts index 024999516aae0..62e4c37d2aad6 100644 --- a/packages/rollup-contracts/test/rollup-list/RLhelper.ts +++ b/packages/rollup-contracts/test/rollup-list/RLhelper.ts @@ -10,44 +10,44 @@ import { newInMemoryDB, SparseMerkleTreeImpl } from '@eth-optimism/core-db' import { utils } from 'ethers' -interface BlockHeader { +interface BatchHeader { timestamp: number isL1ToL2Tx: boolean elementsMerkleRoot: string - numElementsInBlock: number + numElementsInBatch: number cumulativePrevElements: number } interface ElementInclusionProof { - blockIndex: number - blockHeader: BlockHeader - indexInBlock: number + batchIndex: number + batchHeader: BatchHeader + indexInBatch: number siblings: string[] } /* * Helper class which provides all information requried for a particular - * Rollup block. This includes all of the tranisitions in readable form + * Rollup batch. This includes all of the tranisitions in readable form * as well as the merkle tree which it generates. 
*/ -export class DefaultRollupBlock { +export class DefaultRollupBatch { public timestamp: number public isL1ToL2Tx: boolean - public blockIndex: number //index in - public cumulativePrevElements: number //in blockHeader - public elements: string[] //Rollup block + public batchIndex: number //index in + public cumulativePrevElements: number //in batchHeader + public elements: string[] //Rollup batch public elementsMerkleTree: SparseMerkleTreeImpl constructor( - timestamp: number, // Ethereum block this block was submitted in + timestamp: number, // Ethereum batch this batch was submitted in isL1ToL2Tx: boolean, - blockIndex: number, // index in blocks array (first block has blockIndex of 0) + batchIndex: number, // index in batchs array (first batch has batchIndex of 0) cumulativePrevElements: number, elements: string[] ) { this.isL1ToL2Tx = isL1ToL2Tx this.timestamp = timestamp - this.blockIndex = blockIndex + this.batchIndex = batchIndex this.cumulativePrevElements = cumulativePrevElements this.elements = elements } @@ -78,22 +78,22 @@ export class DefaultRollupBlock { } /* - * elementIndex is the index in this block of the element + * elementIndex is the index in this batch of the element * that we want to get the siblings of */ public async getSiblings(elementIndex: number): Promise { - const blockInclusion = await this.elementsMerkleTree.getMerkleProof( + const batchInclusion = await this.elementsMerkleTree.getMerkleProof( new BigNumber(elementIndex), hexStrToBuf(this.elements[elementIndex]) ) - const path = bufToHexString(blockInclusion.key.toBuffer('B', 32)) - const siblings = blockInclusion.siblings.map((sibBuf) => + const path = bufToHexString(batchInclusion.key.toBuffer('B', 32)) + const siblings = batchInclusion.siblings.map((sibBuf) => bufToHexString(sibBuf) ) return siblings } - public async hashBlockHeader(): Promise { + public async hashBatchHeader(): Promise { const bufferRoot = await this.elementsMerkleTree.getRootHash() return utils.solidityKeccak256( ['uint', 'bool', 'bytes32', 'uint', 'uint'], @@ -108,7 +108,7 @@ export class DefaultRollupBlock { } /* - * elementIndex is the index in this block of the element + * elementIndex is the index in this batch of the element * that we want to create an inclusion proof for. */ @@ -117,26 +117,26 @@ export class DefaultRollupBlock { ): Promise { const bufferRoot = await this.elementsMerkleTree.getRootHash() return { - blockIndex: this.blockIndex, - blockHeader: { + batchIndex: this.batchIndex, + batchHeader: { timestamp: this.timestamp, isL1ToL2Tx: this.isL1ToL2Tx, elementsMerkleRoot: bufToHexString(bufferRoot), - numElementsInBlock: this.elements.length, + numElementsInBatch: this.elements.length, cumulativePrevElements: this.cumulativePrevElements, }, - indexInBlock: elementIndex, + indexInBatch: elementIndex, siblings: await this.getSiblings(elementIndex), } } } /* * Helper class which provides all information requried for a particular - * Rollup block. This includes all of the tranisitions in readable form + * Rollup batch. This includes all of the tranisitions in readable form * as well as the merkle tree which it generates. 
*/ export class RollupQueueBatch { - public elements: string[] //Rollup block + public elements: string[] //Rollup batch public elementsMerkleTree: SparseMerkleTreeImpl constructor(elements: string[]) { @@ -161,7 +161,7 @@ export class RollupQueueBatch { } } - public async hashBlockHeader(): Promise { + public async hashBatchHeader(): Promise { const bufferRoot = await this.elementsMerkleTree.getRootHash() return utils.solidityKeccak256( ['bytes32', 'uint'], diff --git a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts index 2d4a78243839a..02f8cd9d6db55 100644 --- a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts @@ -40,99 +40,99 @@ describe('RollupQueue', () => { ) }) - const enqueueAndGenerateBlock = async ( - block: string[] + const enqueueAndGenerateBatch = async ( + batch: string[] ): Promise => { - // Submit the rollup block on-chain - await rollupQueue.enqueueBlock(block) - // Generate a local version of the rollup block - const localBlock = new RollupQueueBatch(block) - await localBlock.generateTree() - return localBlock + // Submit the rollup batch on-chain + await rollupQueue.enqueueBatch(batch) + // Generate a local version of the rollup batch + const localBatch = new RollupQueueBatch(batch) + await localBatch.generateTree() + return localBatch } /* - * Test enqueueBlock() + * Test enqueueBatch() */ - describe('enqueueBlock() ', async () => { + describe('enqueueBatch() ', async () => { it('should not throw as long as it gets a bytes array (even if its invalid)', async () => { - const block = ['0x1234', '0x1234'] - await rollupQueue.enqueueBlock(block) // Did not throw... success! + const batch = ['0x1234', '0x1234'] + await rollupQueue.enqueueBatch(batch) // Did not throw... success! }) - it('should throw if submitting an empty block', async () => { - const emptyBlock = [] + it('should throw if submitting an empty batch', async () => { + const emptyBatch = [] try { - await rollupQueue.enqueueBlock(emptyBlock) + await rollupQueue.enqueueBatch(emptyBatch) } catch (err) { // Success we threw an error! 
return } - throw new Error('Allowed an empty block to be appended') + throw new Error('Allowed an empty batch to be appended') }) - it('should add to blocks array', async () => { - const block = ['0x1234', '0x6578'] - const output = await rollupQueue.enqueueBlock(block) - log.debug('enqueue block output', JSON.stringify(output)) - const blocksLength = await rollupQueue.getBlocksLength() - blocksLength.toNumber().should.equal(1) + it('should add to batches array', async () => { + const batch = ['0x1234', '0x6578'] + const output = await rollupQueue.enqueueBatch(batch) + log.debug('enqueue batch output', JSON.stringify(output)) + const batchesLength = await rollupQueue.getBatchesLength() + batchesLength.toNumber().should.equal(1) }) it('should update cumulativeNumElements correctly', async () => { - const block = ['0x1234', '0x5678'] - await rollupQueue.enqueueBlock(block) + const batch = ['0x1234', '0x5678'] + await rollupQueue.enqueueBatch(batch) const cumulativeNumElements = await rollupQueue.cumulativeNumElements.call() cumulativeNumElements.toNumber().should.equal(2) }) - it('should calculate blockHeaderHash correctly', async () => { - const block = ['0x1234', '0x5678'] - const localBlock = await enqueueAndGenerateBlock(block) - //Check blockHeaderHash - const expectedBlockHeaderHash = await localBlock.hashBlockHeader() - const calculatedBlockHeaderHash = await rollupQueue.blocks(0) - calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) + it('should calculate batchHeaderHash correctly', async () => { + const batch = ['0x1234', '0x5678'] + const localBatch = await enqueueAndGenerateBatch(batch) + //Check batchHeaderHash + const expectedBatchHeaderHash = await localBatch.hashBatchHeader() + const calculatedBatchHeaderHash = await rollupQueue.batches(0) + calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) }) - it('should add multiple blocks correctly', async () => { - const block = ['0x1234', '0x5678'] - const numBlocks = 10 - for (let blockIndex = 0; blockIndex < numBlocks; blockIndex++) { - const cumulativePrevElements = block.length * blockIndex - const localBlock = await enqueueAndGenerateBlock(block) - //Check blockHeaderHash - const expectedBlockHeaderHash = await localBlock.hashBlockHeader() - const calculatedBlockHeaderHash = await rollupQueue.blocks(blockIndex) - calculatedBlockHeaderHash.should.equal(expectedBlockHeaderHash) + it('should add multiple batches correctly', async () => { + const batch = ['0x1234', '0x5678'] + const numBatches = 10 + for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { + const cumulativePrevElements = batch.length * batchIndex + const localBatch = await enqueueAndGenerateBatch(batch) + //Check batchHeaderHash + const expectedBatchHeaderHash = await localBatch.hashBatchHeader() + const calculatedBatchHeaderHash = await rollupQueue.batches(batchIndex) + calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) } - //check blocks length - const blocksLength = await rollupQueue.getBlocksLength() - blocksLength.toNumber().should.equal(numBlocks) + //check batches length + const batchesLength = await rollupQueue.getBatchesLength() + batchesLength.toNumber().should.equal(numBatches) }) }) describe('dequeueBeforeInclusive()', async () => { - it('should dequeue single block', async () => { - const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + it('should dequeue single batch', async () => { + const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] const cumulativePrevElements 
= 0 - const blockIndex = 0 - const localBlock = await enqueueAndGenerateBlock(block) - let blocksLength = await rollupQueue.getBlocksLength() - log.debug(`blocksLength before deletion: ${blocksLength}`) + const batchIndex = 0 + const localBatch = await enqueueAndGenerateBatch(batch) + let batchesLength = await rollupQueue.getBatchesLength() + log.debug(`batchesLength before deletion: ${batchesLength}`) let front = await rollupQueue.front() log.debug(`front before deletion: ${front}`) - let firstBlockHash = await rollupQueue.blocks(0) - log.debug(`firstBlockHash before deletion: ${firstBlockHash}`) - - // delete the single appended block - await rollupQueue.dequeueBeforeInclusive(blockIndex) - - blocksLength = await rollupQueue.getBlocksLength() - log.debug(`blocksLength after deletion: ${blocksLength}`) - blocksLength.should.equal(1) - firstBlockHash = await rollupQueue.blocks(0) - log.debug(`firstBlockHash after deletion: ${firstBlockHash}`) - firstBlockHash.should.equal( + let firstBatchHash = await rollupQueue.batches(0) + log.debug(`firstBatchHash before deletion: ${firstBatchHash}`) + + // delete the single appended batch + await rollupQueue.dequeueBeforeInclusive(batchIndex) + + batchesLength = await rollupQueue.getBatchesLength() + log.debug(`batchesLength after deletion: ${batchesLength}`) + batchesLength.should.equal(1) + firstBatchHash = await rollupQueue.batches(0) + log.debug(`firstBatchHash after deletion: ${firstBatchHash}`) + firstBatchHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' ) front = await rollupQueue.front() @@ -140,34 +140,34 @@ describe('RollupQueue', () => { front.should.equal(1) }) - it('should dequeue many blocks', async () => { - const block = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - const localBlocks = [] - const numBlocks = 5 - for (let blockIndex = 0; blockIndex < numBlocks; blockIndex++) { - const cumulativePrevElements = block.length * blockIndex - const localBlock = await enqueueAndGenerateBlock(block) - localBlocks.push(localBlock) + it('should dequeue many batches', async () => { + const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + const localBatches = [] + const numBatches = 5 + for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { + const cumulativePrevElements = batch.length * batchIndex + const localBatch = await enqueueAndGenerateBatch(batch) + localBatches.push(localBatch) } - let blocksLength = await rollupQueue.getBlocksLength() - log.debug(`blocksLength before deletion: ${blocksLength}`) + let batchesLength = await rollupQueue.getBatchesLength() + log.debug(`batchesLength before deletion: ${batchesLength}`) let front = await rollupQueue.front() log.debug(`front before deletion: ${front}`) - for (let i = 0; i < numBlocks; i++) { - const ithBlockHash = await rollupQueue.blocks(i) - log.debug(`blockHash #${i} before deletion: ${ithBlockHash}`) + for (let i = 0; i < numBatches; i++) { + const ithBatchHash = await rollupQueue.batches(i) + log.debug(`batchHash #${i} before deletion: ${ithBatchHash}`) } - await rollupQueue.dequeueBeforeInclusive(numBlocks - 1) - blocksLength = await rollupQueue.getBlocksLength() - log.debug(`blocksLength after deletion: ${blocksLength}`) - blocksLength.should.equal(numBlocks) + await rollupQueue.dequeueBeforeInclusive(numBatches - 1) + batchesLength = await rollupQueue.getBatchesLength() + log.debug(`batchesLength after deletion: ${batchesLength}`) + batchesLength.should.equal(numBatches) front = await 
rollupQueue.front() log.debug(`front after deletion: ${front}`) - front.should.equal(numBlocks) - for (let i = 0; i < numBlocks; i++) { - const ithBlockHash = await rollupQueue.blocks(i) - log.debug(`blockHash #${i} after deletion: ${ithBlockHash}`) - ithBlockHash.should.equal( + front.should.equal(numBatches) + for (let i = 0; i < numBatches; i++) { + const ithBatchHash = await rollupQueue.batches(i) + log.debug(`batchHash #${i} after deletion: ${ithBatchHash}`) + ithBatchHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' ) } From a4027766ae7b93ab82e8ee4de5a525391b2214fc Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Mon, 18 May 2020 14:56:41 -0400 Subject: [PATCH 08/37] check messages for erroring tests --- .../rollup-list/CanonicalTransactionChain.spec.ts | 15 ++++++--------- .../test/rollup-list/RollupQueue.spec.ts | 12 +++++------- 2 files changed, 11 insertions(+), 16 deletions(-) diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index 10ba26bfad255..c4bb000ed54da 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -77,15 +77,12 @@ describe('CanonicalTransactionChain', () => { it('should throw if submitting an empty batch', async () => { const emptyBatch = [] const timestamp = 0 - try { - await canonicalTxChain - .connect(sequencer) - .appendTransactionBatch(emptyBatch, timestamp) - } catch (err) { - // Success we threw an error! - return - } - throw new Error('Allowed an empty batch to be appended') + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(emptyBatch, timestamp) + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Cannot submit an empty batch' + ) }) it('should add to batches array', async () => { diff --git a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts index 02f8cd9d6db55..62ead6d11db79 100644 --- a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts @@ -61,13 +61,11 @@ describe('RollupQueue', () => { it('should throw if submitting an empty batch', async () => { const emptyBatch = [] - try { - await rollupQueue.enqueueBatch(emptyBatch) - } catch (err) { - // Success we threw an error! 
- return - } - throw new Error('Allowed an empty batch to be appended') + await rollupQueue + .enqueueBatch(emptyBatch) + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Cannot submit an empty batch' + ) }) it('should add to batches array', async () => { From 004d7009e8afe53ebee7e717e8ddfdbe7b97dee4 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Mon, 18 May 2020 16:41:41 -0400 Subject: [PATCH 09/37] add timestamp to RollupQueue batches --- packages/rollup-contracts/contracts/DataTypes.sol | 5 +++++ .../rollup-contracts/contracts/RollupQueue.sol | 5 +++-- .../rollup-list/L1ToL2TransactionQueue.spec.ts | 2 +- .../test/rollup-list/RollupQueue.spec.ts | 15 +++++++++------ 4 files changed, 18 insertions(+), 9 deletions(-) diff --git a/packages/rollup-contracts/contracts/DataTypes.sol b/packages/rollup-contracts/contracts/DataTypes.sol index 835ed72256ae0..dc4d4bf644cfb 100644 --- a/packages/rollup-contracts/contracts/DataTypes.sol +++ b/packages/rollup-contracts/contracts/DataTypes.sol @@ -25,4 +25,9 @@ contract DataTypes { uint numElementsInBatch; uint cumulativePrevElements; } + + struct TimestampedHash { + uint timestamp; + bytes32 batchHeaderHash; + } } diff --git a/packages/rollup-contracts/contracts/RollupQueue.sol b/packages/rollup-contracts/contracts/RollupQueue.sol index 2addadd400994..70f215bce802d 100644 --- a/packages/rollup-contracts/contracts/RollupQueue.sol +++ b/packages/rollup-contracts/contracts/RollupQueue.sol @@ -9,7 +9,7 @@ contract RollupQueue { // How many elements in total have been appended uint public cumulativeNumElements; // List of batch header hashes - bytes32[] public batches; + dt.TimestampedHash[] public batches; uint256 public front; //Index of the first batchHeaderHash in the list // The Rollup Merkle Tree library (currently a contract for ease of testing) @@ -42,8 +42,9 @@ contract RollupQueue { _rollupBatch.length // numElementsInBatch ) ); + dt.TimestampedHash memory timestampedBatchHeaderHash = dt.TimestampedHash(now, batchHeaderHash); // store batch header - batches.push(batchHeaderHash); + batches.push(timestampedBatchHeaderHash); // update cumulative elements cumulativeNumElements += _rollupBatch.length; } diff --git a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts index 41b392e9daf70..f98f8e379ce0c 100644 --- a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts @@ -89,7 +89,7 @@ describe('L1ToL2TransactionQueue', () => { batchesLength = await l1ToL2TxQueue.getBatchesLength() log.debug(`batchesLength after deletion: ${batchesLength}`) batchesLength.should.equal(1) - firstBatchHash = await l1ToL2TxQueue.batches(0) + firstBatchHash = (await l1ToL2TxQueue.batches(0)).batchHeaderHash log.debug(`firstBatchHash after deletion: ${firstBatchHash}`) firstBatchHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' diff --git a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts index 62ead6d11db79..c160eb21312fd 100644 --- a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts @@ -88,7 +88,8 @@ describe('RollupQueue', () => { const localBatch = await enqueueAndGenerateBatch(batch) //Check batchHeaderHash const expectedBatchHeaderHash = await 
localBatch.hashBatchHeader() - const calculatedBatchHeaderHash = await rollupQueue.batches(0) + const calculatedBatchHeaderHash = (await rollupQueue.batches(0)) + .batchHeaderHash calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) }) @@ -100,7 +101,9 @@ describe('RollupQueue', () => { const localBatch = await enqueueAndGenerateBatch(batch) //Check batchHeaderHash const expectedBatchHeaderHash = await localBatch.hashBatchHeader() - const calculatedBatchHeaderHash = await rollupQueue.batches(batchIndex) + const calculatedBatchHeaderHash = ( + await rollupQueue.batches(batchIndex) + ).batchHeaderHash calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) } //check batches length @@ -119,7 +122,7 @@ describe('RollupQueue', () => { log.debug(`batchesLength before deletion: ${batchesLength}`) let front = await rollupQueue.front() log.debug(`front before deletion: ${front}`) - let firstBatchHash = await rollupQueue.batches(0) + let firstBatchHash = (await rollupQueue.batches(0)).batchHeaderHash log.debug(`firstBatchHash before deletion: ${firstBatchHash}`) // delete the single appended batch @@ -128,7 +131,7 @@ describe('RollupQueue', () => { batchesLength = await rollupQueue.getBatchesLength() log.debug(`batchesLength after deletion: ${batchesLength}`) batchesLength.should.equal(1) - firstBatchHash = await rollupQueue.batches(0) + firstBatchHash = (await rollupQueue.batches(0)).batchHeaderHash log.debug(`firstBatchHash after deletion: ${firstBatchHash}`) firstBatchHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' @@ -152,7 +155,7 @@ describe('RollupQueue', () => { let front = await rollupQueue.front() log.debug(`front before deletion: ${front}`) for (let i = 0; i < numBatches; i++) { - const ithBatchHash = await rollupQueue.batches(i) + const ithBatchHash = (await rollupQueue.batches(i)).batchHeaderHash log.debug(`batchHash #${i} before deletion: ${ithBatchHash}`) } await rollupQueue.dequeueBeforeInclusive(numBatches - 1) @@ -163,7 +166,7 @@ describe('RollupQueue', () => { log.debug(`front after deletion: ${front}`) front.should.equal(numBatches) for (let i = 0; i < numBatches; i++) { - const ithBatchHash = await rollupQueue.batches(i) + const ithBatchHash = (await rollupQueue.batches(i)).batchHeaderHash log.debug(`batchHash #${i} after deletion: ${ithBatchHash}`) ithBatchHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' From 81d109957f79751344e8b0fad6be1ccc31922153 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Mon, 18 May 2020 17:26:29 -0400 Subject: [PATCH 10/37] alter dequeue to only support single batch dequeue --- .../contracts/RollupQueue.sol | 17 ++----- .../CanonicalTransactionChain.spec.ts | 1 - .../L1ToL2TransactionQueue.spec.ts | 6 +-- .../test/rollup-list/RollupQueue.spec.ts | 49 +++++-------------- 4 files changed, 18 insertions(+), 55 deletions(-) diff --git a/packages/rollup-contracts/contracts/RollupQueue.sol b/packages/rollup-contracts/contracts/RollupQueue.sol index 70f215bce802d..d74003295d630 100644 --- a/packages/rollup-contracts/contracts/RollupQueue.sol +++ b/packages/rollup-contracts/contracts/RollupQueue.sol @@ -30,7 +30,7 @@ contract RollupQueue { function authenticateEnqueue(address _sender) public view returns (bool) { return true; } function authenticateDequeue(address _sender) public view returns (bool) { return true; } - // appends to the current list of batches + // enqueues to the end of the current queue of batches function enqueueBatch(bytes[] memory _rollupBatch) 
public { //Check that msg.sender is authorized to append require(authenticateEnqueue(msg.sender), "Message sender does not have permission to enqueue"); @@ -49,18 +49,11 @@ contract RollupQueue { cumulativeNumElements += _rollupBatch.length; } - // dequeues all batches including and before the given batch index - function dequeueBeforeInclusive(uint _batchIndex) public { - //Check that msg.sender is authorized to delete + // dequeues the first (oldest) batch + function dequeueBatch() public { require(authenticateDequeue(msg.sender), "Message sender does not have permission to dequeue"); - //batchIndex is between first and last batches - require(_batchIndex >= front && _batchIndex < batches.length, "Cannot delete batches outside of valid range"); - //delete all batch headers before and including batchIndex - for (uint i = front; i <= _batchIndex; i++) { - delete batches[i]; - } - //keep track of new head of list - front = _batchIndex + 1; + delete batches[front]; + front++; // Note: keep in mind that front can point to a non-existent batch if the list is empty. } } diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index c4bb000ed54da..0076996c96bc7 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -91,7 +91,6 @@ describe('CanonicalTransactionChain', () => { const output = await canonicalTxChain .connect(sequencer) .appendTransactionBatch(batch, timestamp) - log.debug('enqueue batch output', JSON.stringify(output)) const batchesLength = await canonicalTxChain.getBatchsLength() batchesLength.toNumber().should.equal(1) }) diff --git a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts index f98f8e379ce0c..de046c9c98ec8 100644 --- a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts @@ -82,9 +82,7 @@ describe('L1ToL2TransactionQueue', () => { log.debug(`firstBatchHash before deletion: ${firstBatchHash}`) // delete the single appended batch - await l1ToL2TxQueue - .connect(canonicalTransactionChain) - .dequeueBeforeInclusive(batchIndex) + await l1ToL2TxQueue.connect(canonicalTransactionChain).dequeueBatch() batchesLength = await l1ToL2TxQueue.getBatchesLength() log.debug(`batchesLength after deletion: ${batchesLength}`) @@ -104,7 +102,7 @@ describe('L1ToL2TransactionQueue', () => { const batchIndex = 0 await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBatch(batch) await l1ToL2TxQueue - .dequeueBeforeInclusive(batchIndex) + .dequeueBatch() .should.be.revertedWith( 'VM Exception while processing transaction: revert Message sender does not have permission to dequeue' ) diff --git a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts index c160eb21312fd..d6c509e74f577 100644 --- a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts @@ -71,7 +71,6 @@ describe('RollupQueue', () => { it('should add to batches array', async () => { const batch = ['0x1234', '0x6578'] const output = await rollupQueue.enqueueBatch(batch) - log.debug('enqueue batch output', JSON.stringify(output)) const batchesLength = 
await rollupQueue.getBatchesLength() batchesLength.toNumber().should.equal(1) }) @@ -112,66 +111,40 @@ describe('RollupQueue', () => { }) }) - describe('dequeueBeforeInclusive()', async () => { + describe('dequeueBatch()', async () => { it('should dequeue single batch', async () => { const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - const cumulativePrevElements = 0 - const batchIndex = 0 const localBatch = await enqueueAndGenerateBatch(batch) - let batchesLength = await rollupQueue.getBatchesLength() - log.debug(`batchesLength before deletion: ${batchesLength}`) - let front = await rollupQueue.front() - log.debug(`front before deletion: ${front}`) - let firstBatchHash = (await rollupQueue.batches(0)).batchHeaderHash - log.debug(`firstBatchHash before deletion: ${firstBatchHash}`) - // delete the single appended batch - await rollupQueue.dequeueBeforeInclusive(batchIndex) + await rollupQueue.dequeueBatch() - batchesLength = await rollupQueue.getBatchesLength() - log.debug(`batchesLength after deletion: ${batchesLength}`) + const batchesLength = await rollupQueue.getBatchesLength() batchesLength.should.equal(1) - firstBatchHash = (await rollupQueue.batches(0)).batchHeaderHash - log.debug(`firstBatchHash after deletion: ${firstBatchHash}`) + const firstBatchHash = (await rollupQueue.batches(0)).batchHeaderHash firstBatchHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' ) - front = await rollupQueue.front() - log.debug(`front after deletion: ${front}`) + const front = await rollupQueue.front() front.should.equal(1) }) it('should dequeue many batches', async () => { const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - const localBatches = [] const numBatches = 5 for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { - const cumulativePrevElements = batch.length * batchIndex - const localBatch = await enqueueAndGenerateBatch(batch) - localBatches.push(localBatch) - } - let batchesLength = await rollupQueue.getBatchesLength() - log.debug(`batchesLength before deletion: ${batchesLength}`) - let front = await rollupQueue.front() - log.debug(`front before deletion: ${front}`) - for (let i = 0; i < numBatches; i++) { - const ithBatchHash = (await rollupQueue.batches(i)).batchHeaderHash - log.debug(`batchHash #${i} before deletion: ${ithBatchHash}`) + await enqueueAndGenerateBatch(batch) } - await rollupQueue.dequeueBeforeInclusive(numBatches - 1) - batchesLength = await rollupQueue.getBatchesLength() - log.debug(`batchesLength after deletion: ${batchesLength}`) - batchesLength.should.equal(numBatches) - front = await rollupQueue.front() - log.debug(`front after deletion: ${front}`) - front.should.equal(numBatches) for (let i = 0; i < numBatches; i++) { + await rollupQueue.dequeueBatch() + const front = await rollupQueue.front() + front.should.equal(i + 1) const ithBatchHash = (await rollupQueue.batches(i)).batchHeaderHash - log.debug(`batchHash #${i} after deletion: ${ithBatchHash}`) ithBatchHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' ) } + const batchesLength = await rollupQueue.getBatchesLength() + batchesLength.should.equal(numBatches) }) }) }) From 5d7f0c97cfe997a0c1e93c90398ee4b104344807 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Tue, 19 May 2020 20:43:49 -0400 Subject: [PATCH 11/37] integrate l1ToL2Queue into canonical chain --- .../contracts/CanonicalTransactionChain.sol | 42 ++++- .../rollup-contracts/contracts/DataTypes.sol | 9 +- 
.../contracts/RollupQueue.sol | 17 +- .../CanonicalTransactionChain.spec.ts | 151 +++++++++++++++++- .../test/rollup-list/RollupQueue.spec.ts | 24 +++ 5 files changed, 227 insertions(+), 16 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index 4ffad3962762f..525247ef565ed 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -4,6 +4,7 @@ pragma experimental ABIEncoderV2; /* Internal Imports */ import {DataTypes as dt} from "./DataTypes.sol"; import {RollupMerkleUtils} from "./RollupMerkleUtils.sol"; +import {L1ToL2TransactionQueue} from "./L1ToL2TransactionQueue.sol"; contract CanonicalTransactionChain { // The Rollup Merkle Tree library (currently a contract for ease of testing) @@ -14,14 +15,19 @@ contract CanonicalTransactionChain { uint public cumulativeNumElements; // List of batch header hashes bytes32[] public batches; - + uint public latestOVMTimestamp = 0; + uint sequencerLivenessAssumption; + L1ToL2TransactionQueue public l1ToL2Queue; constructor( address _rollupMerkleUtilsAddress, - address _sequencer + address _sequencer, + address _l1ToL2TransactionPasserAddress ) public { merkleUtils = RollupMerkleUtils(_rollupMerkleUtilsAddress); sequencer = _sequencer; + l1ToL2Queue = new L1ToL2TransactionQueue(_rollupMerkleUtilsAddress, _l1ToL2TransactionPasserAddress, address(this)); + sequencerLivenessAssumption = 100000000000000000000000000; // TODO parameterize this } // for testing: returns length of batch list @@ -30,7 +36,7 @@ contract CanonicalTransactionChain { } function hashBatchHeader( - dt.BatchHeader memory _batchHeader + dt.TxChainBatchHeader memory _batchHeader ) public pure returns (bytes32) { return keccak256(abi.encodePacked( _batchHeader.timestamp, @@ -45,10 +51,36 @@ contract CanonicalTransactionChain { return _sender == sequencer; } + function appendL1ToL2Batch(dt.TxQueueBatchHeader memory _batchHeader) public { + // verify header is the next to dequeue for the L1->L2 queue + bytes32 batchHeaderHash = l1ToL2Queue.hashBatchHeader(_batchHeader); + dt.TimestampedHash memory timestampedHash = l1ToL2Queue.getFrontBatch(); + require(batchHeaderHash == timestampedHash.batchHeaderHash, "this aint it chief"); + // if (timestamp + sequencerLivenessAssumption > now) { + // require(authenticateAppend(msg.sender), "Message sender does not have permission to append this batch"); + // } + // require(_timestamp > lastOVMTimestamp, "timestamps must monotonically increase"); + // lastOVMTimestamp = _timestamp; + // // TODO require proposed timestamp is not too far away from currnt timestamp + // // require dist(_timestamp, block.timestamp) < sequencerLivenessAssumption + // // calculate batch header + // bytes32 batchHeaderHash = keccak256(abi.encodePacked( + // _timestamp, + // false, // isL1ToL2Tx + // merkleUtils.getMerkleRoot(_txBatch), // elementsMerkleRoot + // _txBatch.length, // numElementsInBatch + // cumulativeNumElements // cumulativeNumElements + // )); + // // store batch header + // batches.push(batchHeaderHash); + // cumulativeElements += _header.numElementsInBlock; + l1ToL2Queue.dequeueBatch(); + } + // appends to the current list of batches function appendTransactionBatch(bytes[] memory _txBatch, uint _timestamp) public { //Check that msg.sender is authorized to append - require(authenticateAppend(msg.sender), "Message sender does not have permission to 
enqueue"); + require(authenticateAppend(msg.sender), "Message sender does not have permission to append a batch"); require(_txBatch.length > 0, "Cannot submit an empty batch"); // require(_timestamp > lastOVMTimestamp, "timestamps must monotonically increase"); @@ -77,7 +109,7 @@ contract CanonicalTransactionChain { dt.ElementInclusionProof memory _inclusionProof // inclusion proof in the rollup batch ) public view returns (bool) { // For convenience, store the batchHeader - dt.BatchHeader memory batchHeader = _inclusionProof.batchHeader; + dt.TxChainBatchHeader memory batchHeader = _inclusionProof.batchHeader; // make sure absolute position equivalent to relative positions if(_position != _inclusionProof.indexInBatch + batchHeader.cumulativePrevElements) diff --git a/packages/rollup-contracts/contracts/DataTypes.sol b/packages/rollup-contracts/contracts/DataTypes.sol index dc4d4bf644cfb..a8f0d04971ea8 100644 --- a/packages/rollup-contracts/contracts/DataTypes.sol +++ b/packages/rollup-contracts/contracts/DataTypes.sol @@ -13,12 +13,12 @@ contract DataTypes { struct ElementInclusionProof { uint batchIndex; // index in batches array (first batch has batchNumber of 0) - BatchHeader batchHeader; + TxChainBatchHeader batchHeader; uint indexInBatch; // used to verify inclusion of the element in elementsMerkleRoot bytes32[] siblings; // used to verify inclusion of the element in elementsMerkleRoot } - struct BatchHeader { + struct TxChainBatchHeader { uint timestamp; bool isL1ToL2Tx; bytes32 elementsMerkleRoot; @@ -26,6 +26,11 @@ contract DataTypes { uint cumulativePrevElements; } + struct TxQueueBatchHeader { + bytes32 elementsMerkleRoot; + uint numElementsInBatch; + } + struct TimestampedHash { uint timestamp; bytes32 batchHeaderHash; diff --git a/packages/rollup-contracts/contracts/RollupQueue.sol b/packages/rollup-contracts/contracts/RollupQueue.sol index d74003295d630..e79d7054ab732 100644 --- a/packages/rollup-contracts/contracts/RollupQueue.sol +++ b/packages/rollup-contracts/contracts/RollupQueue.sol @@ -27,6 +27,20 @@ contract RollupQueue { return batches.length; } + function getFrontBatch() public view returns (dt.TimestampedHash memory) { + require(front < batches.length, "Cannot get front batch from an empty queue"); + return batches[front]; + } + + function hashBatchHeader( + dt.TxQueueBatchHeader memory _batchHeader + ) public pure returns (bytes32) { + return keccak256(abi.encodePacked( + _batchHeader.elementsMerkleRoot, + _batchHeader.numElementsInBatch + )); + } + function authenticateEnqueue(address _sender) public view returns (bool) { return true; } function authenticateDequeue(address _sender) public view returns (bool) { return true; } @@ -50,10 +64,11 @@ contract RollupQueue { } // dequeues the first (oldest) batch + // Note: keep in mind that front can point to a non-existent batch if the list is empty. function dequeueBatch() public { require(authenticateDequeue(msg.sender), "Message sender does not have permission to dequeue"); + require(front < batches.length, "Cannot dequeue from an empty queue"); delete batches[front]; front++; - // Note: keep in mind that front can point to a non-existent batch if the list is empty. 
} } diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index 0076996c96bc7..364bfdd29a5b5 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -3,21 +3,28 @@ import '../setup' /* External Imports */ import { getLogger } from '@eth-optimism/core-utils' import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' +import { Contract } from 'ethers' /* Internal Imports */ -import { DefaultRollupBatch } from './RLhelper' +import { DefaultRollupBatch, RollupQueueBatch } from './RLhelper' /* Logging */ const log = getLogger('rollup-tx-queue', true) /* Contract Imports */ import * as CanonicalTransactionChain from '../../build/CanonicalTransactionChain.json' +import * as L1ToL2TransactionQueue from '../../build/L1ToL2TransactionQueue.json' import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' /* Begin tests */ describe('CanonicalTransactionChain', () => { const provider = createMockProvider() - const [wallet, sequencer, canonicalTransactionChain] = getWallets(provider) + const [ + wallet, + sequencer, + canonicalTransactionChain, + l1ToL2TransactionPasser, + ] = getWallets(provider) let canonicalTxChain let rollupMerkleUtils @@ -33,7 +40,11 @@ describe('CanonicalTransactionChain', () => { canonicalTxChain = await deployContract( wallet, CanonicalTransactionChain, - [rollupMerkleUtils.address, sequencer.address], + [ + rollupMerkleUtils.address, + sequencer.address, + l1ToL2TransactionPasser.address, + ], { gasLimit: 6700000, } @@ -63,9 +74,9 @@ describe('CanonicalTransactionChain', () => { } /* - * Test enqueueBatch() + * Test appendTransactionBatch() */ - describe('appendTransactionBatch() ', async () => { + describe('appendTransactionBatch()', async () => { it('should not throw as long as it gets a bytes array (even if its invalid)', async () => { const batch = ['0x1234', '0x1234'] const timestamp = 0 @@ -117,7 +128,7 @@ describe('CanonicalTransactionChain', () => { await canonicalTxChain .appendTransactionBatch(batch, timestamp) .should.be.revertedWith( - 'VM Exception while processing transaction: revert Message sender does not have permission to enqueue' + 'VM Exception while processing transaction: revert Message sender does not have permission to append a batch' ) }) it('should calculate batchHeaderHash correctly', async () => { @@ -162,8 +173,132 @@ describe('CanonicalTransactionChain', () => { const batchesLength = await canonicalTxChain.getBatchsLength() batchesLength.toNumber().should.equal(numBatchs) }) - //TODO test with actual transitions and actual state roots - //TODO test above with multiple batches with different # elements and different size elements + }) + + describe('appendL1ToL2Batch()', async () => { + let l1ToL2Queue + let localL1ToL2Queue = [] + const enqueueAndGenerateQueueBatch = async ( + batch: string[] + ): Promise => { + // Submit the rollup batch on-chain + await l1ToL2Queue.connect(l1ToL2TransactionPasser).enqueueBatch(batch) + // Generate a local version of the rollup batch + const localBatch = new RollupQueueBatch(batch) + await localBatch.generateTree() + return localBatch + } + beforeEach(async () => { + const batch = ['0x1234', '0x1234'] + const l1ToL2QueueAddress = await canonicalTxChain.l1ToL2Queue() + l1ToL2Queue = new Contract( + l1ToL2QueueAddress, + L1ToL2TransactionQueue.abi, + 
provider + ) + const localBatch = await l1ToL2Queue.connect(l1ToL2TransactionPasser).enqueueBatch(batch) + localL1ToL2Queue.push(localBatch) + }) + + // it('should successfully append a L1ToL2Batch', async () => { + // await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() + // const front = await l1ToL2Queue.front() + // front.should.equal(1) + // const { timestamp, batchHeaderHash } = await l1ToL2Queue.batches(0) + // timestamp.should.equal(0) + // batchHeaderHash.should.equal( + // '0x0000000000000000000000000000000000000000000000000000000000000000' + // ) + // }) + + // it('should throw if submitting an empty batch', async () => { + // const emptyBatch = [] + // const timestamp = 0 + // await canonicalTxChain + // .connect(sequencer) + // .appendTransactionBatch(emptyBatch, timestamp) + // .should.be.revertedWith( + // 'VM Exception while processing transaction: revert Cannot submit an empty batch' + // ) + // }) + + // it('should add to batches array', async () => { + // const batch = ['0x1234', '0x6578'] + // const timestamp = 0 + // const output = await canonicalTxChain + // .connect(sequencer) + // .appendTransactionBatch(batch, timestamp) + // const batchesLength = await canonicalTxChain.getBatchsLength() + // batchesLength.toNumber().should.equal(1) + // }) + + // it('should update cumulativeNumElements correctly', async () => { + // const batch = ['0x1234', '0x5678'] + // const timestamp = 0 + // await canonicalTxChain + // .connect(sequencer) + // .appendTransactionBatch(batch, timestamp) + // const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() + // cumulativeNumElements.toNumber().should.equal(2) + // }) + // it('should allow appendTransactionBatch from sequencer', async () => { + // const batch = ['0x1234', '0x6578'] + // const timestamp = 0 + // await canonicalTxChain + // .connect(sequencer) + // .appendTransactionBatch(batch, timestamp) // Did not throw... success! 
+ // }) + // it('should not allow appendTransactionBatch from other address', async () => { + // const batch = ['0x1234', '0x6578'] + // const timestamp = 0 + // await canonicalTxChain + // .appendTransactionBatch(batch, timestamp) + // .should.be.revertedWith( + // 'VM Exception while processing transaction: revert Message sender does not have permission to append a batch' + // ) + // }) + // it('should calculate batchHeaderHash correctly', async () => { + // const batch = ['0x1234', '0x5678'] + // const batchIndex = 0 + // const cumulativePrevElements = 0 + // const timestamp = 0 + // const localBatch = await enqueueAndGenerateBatch( + // batch, + // timestamp, + // batchIndex, + // cumulativePrevElements + // ) + // //Check batchHeaderHash + // const expectedBatchHeaderHash = await localBatch.hashBatchHeader() + // const calculatedBatchHeaderHash = await canonicalTxChain.batches(0) + // calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) + // }) + // it('should add multiple batches correctly', async () => { + // const batch = ['0x1234', '0x5678'] + // const numBatchs = 10 + // for (let batchIndex = 0; batchIndex < numBatchs; batchIndex++) { + // const timestamp = batchIndex + // const cumulativePrevElements = batch.length * batchIndex + // const localBatch = await enqueueAndGenerateBatch( + // batch, + // timestamp, + // batchIndex, + // cumulativePrevElements + // ) + // //Check batchHeaderHash + // const expectedBatchHeaderHash = await localBatch.hashBatchHeader() + // const calculatedBatchHeaderHash = await canonicalTxChain.batches( + // batchIndex + // ) + // calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) + // } + // //check cumulativeNumElements + // const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() + // cumulativeNumElements.toNumber().should.equal(numBatchs * batch.length) + // //check batches length + // const batchesLength = await canonicalTxChain.getBatchsLength() + // batchesLength.toNumber().should.equal(numBatchs) + // }) }) /* diff --git a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts index d6c509e74f577..71439fc6dd8fd 100644 --- a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts @@ -146,5 +146,29 @@ describe('RollupQueue', () => { const batchesLength = await rollupQueue.getBatchesLength() batchesLength.should.equal(numBatches) }) + + it('should throw if dequeueing from empty queue', async () => { + await rollupQueue + .dequeueBatch() + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Cannot dequeue from an empty queue' + ) + }) + + it('should throw if dequeueing from a once populated, now empty queue', async () => { + const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + const numBatches = 3 + for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { + await enqueueAndGenerateBatch(batch) + } + for (let i = 0; i < numBatches; i++) { + await rollupQueue.dequeueBatch() + } + await rollupQueue + .dequeueBatch() + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Cannot dequeue from an empty queue' + ) + }) }) }) From a8bdd900bc0b0258c05f2835b0706a43e6929ac2 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Tue, 19 May 2020 23:27:24 -0400 Subject: [PATCH 12/37] remove extraneous comments, working batch header verification --- .../contracts/CanonicalTransactionChain.sol | 
3 +- .../CanonicalTransactionChain.spec.ts | 156 +++--------------- .../L1ToL2TransactionQueue.spec.ts | 13 +- .../test/rollup-list/RLhelper.ts | 16 +- 4 files changed, 43 insertions(+), 145 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index 525247ef565ed..b25a9c2a177c2 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -55,7 +55,7 @@ contract CanonicalTransactionChain { // verify header is the next to dequeue for the L1->L2 queue bytes32 batchHeaderHash = l1ToL2Queue.hashBatchHeader(_batchHeader); dt.TimestampedHash memory timestampedHash = l1ToL2Queue.getFrontBatch(); - require(batchHeaderHash == timestampedHash.batchHeaderHash, "this aint it chief"); + require(batchHeaderHash == timestampedHash.batchHeaderHash, "This batch header is different than the batch header at the front of the L1ToL2TransactionQueue"); // if (timestamp + sequencerLivenessAssumption > now) { // require(authenticateAppend(msg.sender), "Message sender does not have permission to append this batch"); // } @@ -83,6 +83,7 @@ contract CanonicalTransactionChain { require(authenticateAppend(msg.sender), "Message sender does not have permission to append a batch"); require(_txBatch.length > 0, "Cannot submit an empty batch"); + // TODO // require(_timestamp > lastOVMTimestamp, "timestamps must monotonically increase"); // lastOVMTimestamp = _timestamp; // require dist(_timestamp, batch.timestamp) < sequencerLivenessAssumption diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index 364bfdd29a5b5..bf0ffbbaca8fe 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -177,7 +177,7 @@ describe('CanonicalTransactionChain', () => { describe('appendL1ToL2Batch()', async () => { let l1ToL2Queue - let localL1ToL2Queue = [] + const localL1ToL2Queue = [] const enqueueAndGenerateQueueBatch = async ( batch: string[] ): Promise => { @@ -196,114 +196,35 @@ describe('CanonicalTransactionChain', () => { L1ToL2TransactionQueue.abi, provider ) - const localBatch = await l1ToL2Queue.connect(l1ToL2TransactionPasser).enqueueBatch(batch) + const localBatch = await enqueueAndGenerateQueueBatch(batch) localL1ToL2Queue.push(localBatch) }) - - // it('should successfully append a L1ToL2Batch', async () => { - // await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() - // const front = await l1ToL2Queue.front() - // front.should.equal(1) - // const { timestamp, batchHeaderHash } = await l1ToL2Queue.batches(0) - // timestamp.should.equal(0) - // batchHeaderHash.should.equal( - // '0x0000000000000000000000000000000000000000000000000000000000000000' - // ) - // }) - - // it('should throw if submitting an empty batch', async () => { - // const emptyBatch = [] - // const timestamp = 0 - // await canonicalTxChain - // .connect(sequencer) - // .appendTransactionBatch(emptyBatch, timestamp) - // .should.be.revertedWith( - // 'VM Exception while processing transaction: revert Cannot submit an empty batch' - // ) - // }) - - // it('should add to batches array', async () => { - // const batch = ['0x1234', '0x6578'] - // const timestamp = 0 - // const output = await canonicalTxChain - // .connect(sequencer) 
- // .appendTransactionBatch(batch, timestamp) - // const batchesLength = await canonicalTxChain.getBatchsLength() - // batchesLength.toNumber().should.equal(1) - // }) - - // it('should update cumulativeNumElements correctly', async () => { - // const batch = ['0x1234', '0x5678'] - // const timestamp = 0 - // await canonicalTxChain - // .connect(sequencer) - // .appendTransactionBatch(batch, timestamp) - // const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() - // cumulativeNumElements.toNumber().should.equal(2) - // }) - // it('should allow appendTransactionBatch from sequencer', async () => { - // const batch = ['0x1234', '0x6578'] - // const timestamp = 0 - // await canonicalTxChain - // .connect(sequencer) - // .appendTransactionBatch(batch, timestamp) // Did not throw... success! - // }) - // it('should not allow appendTransactionBatch from other address', async () => { - // const batch = ['0x1234', '0x6578'] - // const timestamp = 0 - // await canonicalTxChain - // .appendTransactionBatch(batch, timestamp) - // .should.be.revertedWith( - // 'VM Exception while processing transaction: revert Message sender does not have permission to append a batch' - // ) - // }) - // it('should calculate batchHeaderHash correctly', async () => { - // const batch = ['0x1234', '0x5678'] - // const batchIndex = 0 - // const cumulativePrevElements = 0 - // const timestamp = 0 - // const localBatch = await enqueueAndGenerateBatch( - // batch, - // timestamp, - // batchIndex, - // cumulativePrevElements - // ) - // //Check batchHeaderHash - // const expectedBatchHeaderHash = await localBatch.hashBatchHeader() - // const calculatedBatchHeaderHash = await canonicalTxChain.batches(0) - // calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) - // }) - // it('should add multiple batches correctly', async () => { - // const batch = ['0x1234', '0x5678'] - // const numBatchs = 10 - // for (let batchIndex = 0; batchIndex < numBatchs; batchIndex++) { - // const timestamp = batchIndex - // const cumulativePrevElements = batch.length * batchIndex - // const localBatch = await enqueueAndGenerateBatch( - // batch, - // timestamp, - // batchIndex, - // cumulativePrevElements - // ) - // //Check batchHeaderHash - // const expectedBatchHeaderHash = await localBatch.hashBatchHeader() - // const calculatedBatchHeaderHash = await canonicalTxChain.batches( - // batchIndex - // ) - // calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) - // } - // //check cumulativeNumElements - // const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() - // cumulativeNumElements.toNumber().should.equal(numBatchs * batch.length) - // //check batches length - // const batchesLength = await canonicalTxChain.getBatchsLength() - // batchesLength.toNumber().should.equal(numBatchs) - // }) + it.only('should revert when passed an incorrect batch header', async () => { + const localBatchHeader = await localL1ToL2Queue[0].getBatchHeader() + localBatchHeader.numElementsInBatch++ + await canonicalTxChain + .connect(sequencer) + .appendL1ToL2Batch(localBatchHeader) + .should.be.revertedWith( + 'VM Exception while processing transaction: revert This batch header is different than the batch header at the front of the L1ToL2TransactionQueue' + ) + }) + it('should successfully dequeue a L1ToL2Batch', async () => { + const localBatchHeader = await localL1ToL2Queue[0].getBatchHeader() + console.log('local', localBatchHeader) + await canonicalTxChain + .connect(sequencer) + 
.appendL1ToL2Batch(localBatchHeader) + const front = await l1ToL2Queue.front() + front.should.equal(1) + const { timestamp, batchHeaderHash } = await l1ToL2Queue.batches(0) + timestamp.should.equal(0) + batchHeaderHash.should.equal( + '0x0000000000000000000000000000000000000000000000000000000000000000' + ) + }) }) - /* - * Test verifyElement() - */ describe('verifyElement() ', async () => { it('should return true for valid elements for different batchIndexes', async () => { const maxBatchNumber = 5 @@ -314,7 +235,6 @@ describe('CanonicalTransactionChain', () => { batchIndex < maxBatchNumber + 1; batchIndex++ ) { - log.debug(`testing valid proof for batch #: ${batchIndex}`) const timestamp = batchIndex const cumulativePrevElements = batch.length * batchIndex const localBatch = await enqueueAndGenerateBatch( @@ -330,19 +250,11 @@ describe('CanonicalTransactionChain', () => { const elementInclusionProof = await localBatch.getElementInclusionProof( elementIndex ) - log.debug( - `trying to correctly verify this inclusion proof: ${JSON.stringify( - elementInclusionProof - )}` - ) - //run verifyElement() - // const isIncluded = await canonicalTxChain.verifyElement( element, position, elementInclusionProof ) - log.debug('isIncluded: ', JSON.stringify(isIncluded)) isIncluded.should.equal(true) } }) @@ -364,21 +276,13 @@ describe('CanonicalTransactionChain', () => { const elementInclusionProof = await localBatch.getElementInclusionProof( elementIndex ) - log.debug( - `trying to falsely verify this inclusion proof: ${JSON.stringify( - elementInclusionProof - )}` - ) //Give wrong position so inclusion proof is wrong const wrongPosition = position + 1 - //run verifyElement() - // const isIncluded = await canonicalTxChain.verifyElement( element, wrongPosition, elementInclusionProof ) - log.debug('isIncluded: ', JSON.stringify(isIncluded)) isIncluded.should.equal(false) }) @@ -404,19 +308,11 @@ describe('CanonicalTransactionChain', () => { const wrongPosition = position + 1 //Change index to also be false (so position = index + cumulative) elementInclusionProof.indexInBatch++ - log.debug( - `trying to falsely verify this inclusion proof: ${JSON.stringify( - elementInclusionProof - )}` - ) - //run verifyElement() - // const isIncluded = await canonicalTxChain.verifyElement( element, wrongPosition, elementInclusionProof ) - log.debug('isIncluded: ', JSON.stringify(isIncluded)) isIncluded.should.equal(false) }) }) diff --git a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts index de046c9c98ec8..ea1915b5f5c09 100644 --- a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts @@ -48,9 +48,6 @@ describe('L1ToL2TransactionQueue', () => { ) }) - /* - * Test enqueueBatch() - */ describe('enqueueBatch() ', async () => { it('should allow enqueue from l1ToL2TransactionPasser', async () => { const batch = ['0x1234'] @@ -65,9 +62,7 @@ describe('L1ToL2TransactionQueue', () => { ) }) }) - /* - * Test dequeueBatch() - */ + describe('dequeueBatch() ', async () => { it('should allow dequeue from canonicalTransactionChain', async () => { const batch = ['0x1234'] @@ -75,25 +70,19 @@ describe('L1ToL2TransactionQueue', () => { const batchIndex = 0 await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBatch(batch) let batchesLength = await l1ToL2TxQueue.getBatchesLength() - log.debug(`batchesLength before deletion: 
${batchesLength}`) let front = await l1ToL2TxQueue.front() - log.debug(`front before deletion: ${front}`) let firstBatchHash = await l1ToL2TxQueue.batches(0) - log.debug(`firstBatchHash before deletion: ${firstBatchHash}`) // delete the single appended batch await l1ToL2TxQueue.connect(canonicalTransactionChain).dequeueBatch() batchesLength = await l1ToL2TxQueue.getBatchesLength() - log.debug(`batchesLength after deletion: ${batchesLength}`) batchesLength.should.equal(1) firstBatchHash = (await l1ToL2TxQueue.batches(0)).batchHeaderHash - log.debug(`firstBatchHash after deletion: ${firstBatchHash}`) firstBatchHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' ) front = await l1ToL2TxQueue.front() - log.debug(`front after deletion: ${front}`) front.should.equal(1) }) it('should not allow dequeue from other address', async () => { diff --git a/packages/rollup-contracts/test/rollup-list/RLhelper.ts b/packages/rollup-contracts/test/rollup-list/RLhelper.ts index 62e4c37d2aad6..010e2e3743d60 100644 --- a/packages/rollup-contracts/test/rollup-list/RLhelper.ts +++ b/packages/rollup-contracts/test/rollup-list/RLhelper.ts @@ -10,7 +10,7 @@ import { newInMemoryDB, SparseMerkleTreeImpl } from '@eth-optimism/core-db' import { utils } from 'ethers' -interface BatchHeader { +interface TxChainBatchHeader { timestamp: number isL1ToL2Tx: boolean elementsMerkleRoot: string @@ -20,11 +20,16 @@ interface BatchHeader { interface ElementInclusionProof { batchIndex: number - batchHeader: BatchHeader + batchHeader: TxChainBatchHeader indexInBatch: number siblings: string[] } +interface TxQueueBatchHeader { + elementsMerkleRoot: string + numElementsInBatch: number +} + /* * Helper class which provides all information requried for a particular * Rollup batch. 
This includes all of the tranisitions in readable form @@ -160,6 +165,13 @@ export class RollupQueueBatch { ) } } + public async getBatchHeader(): Promise { + const bufferRoot = await this.elementsMerkleTree.getRootHash() + return { + elementsMerkleRoot: bufToHexString(bufferRoot), + numElementsInBatch: this.elements.length, + } + } public async hashBatchHeader(): Promise { const bufferRoot = await this.elementsMerkleTree.getRootHash() From e61c794226d3e820c188f824e26008f2bbee3245 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Wed, 20 May 2020 17:25:20 -0400 Subject: [PATCH 13/37] constrain rollupQueue to single tx batches --- .../contracts/CanonicalTransactionChain.sol | 6 +- .../rollup-contracts/contracts/DataTypes.sol | 2 +- .../contracts/RollupQueue.sol | 30 +----- .../CanonicalTransactionChain.spec.ts | 49 ++++------ .../L1ToL2TransactionQueue.spec.ts | 37 +++----- .../test/rollup-list/RLhelper.ts | 21 ++--- .../test/rollup-list/RollupQueue.spec.ts | 94 ++++++++----------- 7 files changed, 88 insertions(+), 151 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index b25a9c2a177c2..f07171200cc1d 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -51,11 +51,11 @@ contract CanonicalTransactionChain { return _sender == sequencer; } - function appendL1ToL2Batch(dt.TxQueueBatchHeader memory _batchHeader) public { + function appendL1ToL2Batch() public { // verify header is the next to dequeue for the L1->L2 queue - bytes32 batchHeaderHash = l1ToL2Queue.hashBatchHeader(_batchHeader); + // bytes32 batchHeaderHash = l1ToL2Queue.hashBatchHeader(_batchHeader); dt.TimestampedHash memory timestampedHash = l1ToL2Queue.getFrontBatch(); - require(batchHeaderHash == timestampedHash.batchHeaderHash, "This batch header is different than the batch header at the front of the L1ToL2TransactionQueue"); + // require(batchHeaderHash == timestampedHash.batchHeaderHash, "This batch header is different than the batch header at the front of the L1ToL2TransactionQueue"); // if (timestamp + sequencerLivenessAssumption > now) { // require(authenticateAppend(msg.sender), "Message sender does not have permission to append this batch"); // } diff --git a/packages/rollup-contracts/contracts/DataTypes.sol b/packages/rollup-contracts/contracts/DataTypes.sol index a8f0d04971ea8..e3245408570e0 100644 --- a/packages/rollup-contracts/contracts/DataTypes.sol +++ b/packages/rollup-contracts/contracts/DataTypes.sol @@ -33,6 +33,6 @@ contract DataTypes { struct TimestampedHash { uint timestamp; - bytes32 batchHeaderHash; + bytes32 txHash; } } diff --git a/packages/rollup-contracts/contracts/RollupQueue.sol b/packages/rollup-contracts/contracts/RollupQueue.sol index e79d7054ab732..bf8ee3a4fa039 100644 --- a/packages/rollup-contracts/contracts/RollupQueue.sol +++ b/packages/rollup-contracts/contracts/RollupQueue.sol @@ -6,8 +6,6 @@ import {DataTypes as dt} from "./DataTypes.sol"; import {RollupMerkleUtils} from "./RollupMerkleUtils.sol"; contract RollupQueue { - // How many elements in total have been appended - uint public cumulativeNumElements; // List of batch header hashes dt.TimestampedHash[] public batches; uint256 public front; //Index of the first batchHeaderHash in the list @@ -32,35 +30,17 @@ contract RollupQueue { return batches[front]; } - function hashBatchHeader( - dt.TxQueueBatchHeader memory _batchHeader - ) 
public pure returns (bytes32) { - return keccak256(abi.encodePacked( - _batchHeader.elementsMerkleRoot, - _batchHeader.numElementsInBatch - )); - } - function authenticateEnqueue(address _sender) public view returns (bool) { return true; } function authenticateDequeue(address _sender) public view returns (bool) { return true; } // enqueues to the end of the current queue of batches - function enqueueBatch(bytes[] memory _rollupBatch) public { - //Check that msg.sender is authorized to append + function enqueueTx(bytes memory _tx) public { require(authenticateEnqueue(msg.sender), "Message sender does not have permission to enqueue"); - require(_rollupBatch.length > 0, "Cannot submit an empty batch"); - // calculate batch header - bytes32 batchHeaderHash = keccak256( - abi.encodePacked( - merkleUtils.getMerkleRoot(_rollupBatch), // elementsMerkleRoot - _rollupBatch.length // numElementsInBatch - ) + dt.TimestampedHash memory timestampedHash = dt.TimestampedHash( + now, + keccak256(_tx) ); - dt.TimestampedHash memory timestampedBatchHeaderHash = dt.TimestampedHash(now, batchHeaderHash); - // store batch header - batches.push(timestampedBatchHeaderHash); - // update cumulative elements - cumulativeNumElements += _rollupBatch.length; + batches.push(timestampedHash); } // dequeues the first (oldest) batch diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index bf0ffbbaca8fe..eaf61fa981308 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -51,7 +51,7 @@ describe('CanonicalTransactionChain', () => { ) }) - const enqueueAndGenerateBatch = async ( + const appendAndGenerateBatch = async ( batch: string[], timestamp: number, batchIndex: number, @@ -136,7 +136,7 @@ describe('CanonicalTransactionChain', () => { const batchIndex = 0 const cumulativePrevElements = 0 const timestamp = 0 - const localBatch = await enqueueAndGenerateBatch( + const localBatch = await appendAndGenerateBatch( batch, timestamp, batchIndex, @@ -153,7 +153,7 @@ describe('CanonicalTransactionChain', () => { for (let batchIndex = 0; batchIndex < numBatchs; batchIndex++) { const timestamp = batchIndex const cumulativePrevElements = batch.length * batchIndex - const localBatch = await enqueueAndGenerateBatch( + const localBatch = await appendAndGenerateBatch( batch, timestamp, batchIndex, @@ -178,48 +178,39 @@ describe('CanonicalTransactionChain', () => { describe('appendL1ToL2Batch()', async () => { let l1ToL2Queue const localL1ToL2Queue = [] - const enqueueAndGenerateQueueBatch = async ( - batch: string[] + const enqueueAndGenerateBatch = async ( + tx: string ): Promise => { // Submit the rollup batch on-chain - await l1ToL2Queue.connect(l1ToL2TransactionPasser).enqueueBatch(batch) + const enqueueTx = await l1ToL2Queue + .connect(l1ToL2TransactionPasser) + .enqueueTx(tx) + const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) + const timestamp = (await provider.getBlock(txReceipt.blockNumber)) + .timestamp // Generate a local version of the rollup batch - const localBatch = new RollupQueueBatch(batch) + const localBatch = new RollupQueueBatch(tx, timestamp) await localBatch.generateTree() return localBatch } beforeEach(async () => { - const batch = ['0x1234', '0x1234'] + const tx = '0x1234' const l1ToL2QueueAddress = await canonicalTxChain.l1ToL2Queue() l1ToL2Queue = new 
Contract( l1ToL2QueueAddress, L1ToL2TransactionQueue.abi, provider ) - const localBatch = await enqueueAndGenerateQueueBatch(batch) + const localBatch = await enqueueAndGenerateBatch(tx) localL1ToL2Queue.push(localBatch) }) - it.only('should revert when passed an incorrect batch header', async () => { - const localBatchHeader = await localL1ToL2Queue[0].getBatchHeader() - localBatchHeader.numElementsInBatch++ - await canonicalTxChain - .connect(sequencer) - .appendL1ToL2Batch(localBatchHeader) - .should.be.revertedWith( - 'VM Exception while processing transaction: revert This batch header is different than the batch header at the front of the L1ToL2TransactionQueue' - ) - }) it('should successfully dequeue a L1ToL2Batch', async () => { - const localBatchHeader = await localL1ToL2Queue[0].getBatchHeader() - console.log('local', localBatchHeader) - await canonicalTxChain - .connect(sequencer) - .appendL1ToL2Batch(localBatchHeader) + await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() const front = await l1ToL2Queue.front() front.should.equal(1) - const { timestamp, batchHeaderHash } = await l1ToL2Queue.batches(0) + const { timestamp, txHash } = await l1ToL2Queue.batches(0) timestamp.should.equal(0) - batchHeaderHash.should.equal( + txHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' ) }) @@ -237,7 +228,7 @@ describe('CanonicalTransactionChain', () => { ) { const timestamp = batchIndex const cumulativePrevElements = batch.length * batchIndex - const localBatch = await enqueueAndGenerateBatch( + const localBatch = await appendAndGenerateBatch( batch, timestamp, batchIndex, @@ -264,7 +255,7 @@ describe('CanonicalTransactionChain', () => { const cumulativePrevElements = 0 const batchIndex = 0 const timestamp = 0 - const localBatch = await enqueueAndGenerateBatch( + const localBatch = await appendAndGenerateBatch( batch, timestamp, batchIndex, @@ -291,7 +282,7 @@ describe('CanonicalTransactionChain', () => { const cumulativePrevElements = 0 const batchIndex = 0 const timestamp = 0 - const localBatch = await enqueueAndGenerateBatch( + const localBatch = await appendAndGenerateBatch( batch, timestamp, batchIndex, diff --git a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts index ea1915b5f5c09..8c3e75f0d9a45 100644 --- a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts @@ -50,13 +50,15 @@ describe('L1ToL2TransactionQueue', () => { describe('enqueueBatch() ', async () => { it('should allow enqueue from l1ToL2TransactionPasser', async () => { - const batch = ['0x1234'] - await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBatch(batch) // Did not throw... success! + const tx = '0x1234' + await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueTx(tx) // Did not throw... success! 
+ const batchesLength = await l1ToL2TxQueue.getBatchesLength() + batchesLength.should.equal(1) }) it('should not allow enqueue from other address', async () => { - const batch = ['0x1234'] + const tx = '0x1234' await l1ToL2TxQueue - .enqueueBatch(batch) + .enqueueTx(tx) .should.be.revertedWith( 'VM Exception while processing transaction: revert Message sender does not have permission to enqueue' ) @@ -65,31 +67,22 @@ describe('L1ToL2TransactionQueue', () => { describe('dequeueBatch() ', async () => { it('should allow dequeue from canonicalTransactionChain', async () => { - const batch = ['0x1234'] - const cumulativePrevElements = 0 - const batchIndex = 0 - await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBatch(batch) - let batchesLength = await l1ToL2TxQueue.getBatchesLength() - let front = await l1ToL2TxQueue.front() - let firstBatchHash = await l1ToL2TxQueue.batches(0) - - // delete the single appended batch + const tx = '0x1234' + await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueTx(tx) await l1ToL2TxQueue.connect(canonicalTransactionChain).dequeueBatch() - - batchesLength = await l1ToL2TxQueue.getBatchesLength() + const batchesLength = await l1ToL2TxQueue.getBatchesLength() batchesLength.should.equal(1) - firstBatchHash = (await l1ToL2TxQueue.batches(0)).batchHeaderHash - firstBatchHash.should.equal( + const { txHash, timestamp } = await l1ToL2TxQueue.batches(0) + txHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' ) - front = await l1ToL2TxQueue.front() + timestamp.should.equal(0) + const front = await l1ToL2TxQueue.front() front.should.equal(1) }) it('should not allow dequeue from other address', async () => { - const batch = ['0x1234'] - const cumulativePrevElements = 0 - const batchIndex = 0 - await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueBatch(batch) + const tx = '0x1234' + await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueTx(tx) await l1ToL2TxQueue .dequeueBatch() .should.be.revertedWith( diff --git a/packages/rollup-contracts/test/rollup-list/RLhelper.ts b/packages/rollup-contracts/test/rollup-list/RLhelper.ts index 010e2e3743d60..b86e860b1ecd2 100644 --- a/packages/rollup-contracts/test/rollup-list/RLhelper.ts +++ b/packages/rollup-contracts/test/rollup-list/RLhelper.ts @@ -143,9 +143,11 @@ export class DefaultRollupBatch { export class RollupQueueBatch { public elements: string[] //Rollup batch public elementsMerkleTree: SparseMerkleTreeImpl + public timestamp: number - constructor(elements: string[]) { - this.elements = elements + constructor(tx: string, timestamp: number) { + this.elements = [tx] + this.timestamp = timestamp } /* * Generate the elements merkle tree from this.elements @@ -165,19 +167,8 @@ export class RollupQueueBatch { ) } } - public async getBatchHeader(): Promise { + public async getMerkleRoot(): Promise { const bufferRoot = await this.elementsMerkleTree.getRootHash() - return { - elementsMerkleRoot: bufToHexString(bufferRoot), - numElementsInBatch: this.elements.length, - } - } - - public async hashBatchHeader(): Promise { - const bufferRoot = await this.elementsMerkleTree.getRootHash() - return utils.solidityKeccak256( - ['bytes32', 'uint'], - [bufToHexString(bufferRoot), this.elements.length] - ) + return bufToHexString(bufferRoot) } } diff --git a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts index 71439fc6dd8fd..e4ec1a653db71 100644 --- 
a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts @@ -41,69 +41,49 @@ describe('RollupQueue', () => { }) const enqueueAndGenerateBatch = async ( - batch: string[] + tx: string ): Promise => { // Submit the rollup batch on-chain - await rollupQueue.enqueueBatch(batch) + const enqueueTx = await rollupQueue.enqueueTx(tx) + const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) + const timestamp = (await provider.getBlock(txReceipt.blockNumber)).timestamp // Generate a local version of the rollup batch - const localBatch = new RollupQueueBatch(batch) + const localBatch = new RollupQueueBatch(tx, timestamp) await localBatch.generateTree() return localBatch } /* - * Test enqueueBatch() + * Test enqueueTx() */ - describe('enqueueBatch() ', async () => { + describe('enqueueTx() ', async () => { it('should not throw as long as it gets a bytes array (even if its invalid)', async () => { - const batch = ['0x1234', '0x1234'] - await rollupQueue.enqueueBatch(batch) // Did not throw... success! + const tx = '0x1234' + await rollupQueue.enqueueTx(tx) // Did not throw... success! }) - - it('should throw if submitting an empty batch', async () => { - const emptyBatch = [] - await rollupQueue - .enqueueBatch(emptyBatch) - .should.be.revertedWith( - 'VM Exception while processing transaction: revert Cannot submit an empty batch' - ) - }) - it('should add to batches array', async () => { - const batch = ['0x1234', '0x6578'] - const output = await rollupQueue.enqueueBatch(batch) + const tx = '0x1234' + const output = await rollupQueue.enqueueTx(tx) const batchesLength = await rollupQueue.getBatchesLength() batchesLength.toNumber().should.equal(1) }) - - it('should update cumulativeNumElements correctly', async () => { - const batch = ['0x1234', '0x5678'] - await rollupQueue.enqueueBatch(batch) - const cumulativeNumElements = await rollupQueue.cumulativeNumElements.call() - cumulativeNumElements.toNumber().should.equal(2) - }) - - it('should calculate batchHeaderHash correctly', async () => { - const batch = ['0x1234', '0x5678'] - const localBatch = await enqueueAndGenerateBatch(batch) - //Check batchHeaderHash - const expectedBatchHeaderHash = await localBatch.hashBatchHeader() - const calculatedBatchHeaderHash = (await rollupQueue.batches(0)) - .batchHeaderHash - calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) + it('should calculate set the TimestampedHash correctly', async () => { + const tx = '0x1234' + const localBatch = await enqueueAndGenerateBatch(tx) + const { txHash, timestamp } = await rollupQueue.batches(0) + const expectedBatchHeaderHash = await localBatch.getMerkleRoot() + txHash.should.equal(expectedBatchHeaderHash) + timestamp.should.equal(localBatch.timestamp) }) it('should add multiple batches correctly', async () => { - const batch = ['0x1234', '0x5678'] + const tx = '0x1234' const numBatches = 10 for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { - const cumulativePrevElements = batch.length * batchIndex - const localBatch = await enqueueAndGenerateBatch(batch) - //Check batchHeaderHash - const expectedBatchHeaderHash = await localBatch.hashBatchHeader() - const calculatedBatchHeaderHash = ( - await rollupQueue.batches(batchIndex) - ).batchHeaderHash - calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) + const localBatch = await enqueueAndGenerateBatch(tx) + const { txHash, timestamp } = await rollupQueue.batches(batchIndex) + const 
expectedBatchHeaderHash = await localBatch.getMerkleRoot() + txHash.should.equal(expectedBatchHeaderHash) + timestamp.should.equal(localBatch.timestamp) } //check batches length const batchesLength = await rollupQueue.getBatchesLength() @@ -113,35 +93,37 @@ describe('RollupQueue', () => { describe('dequeueBatch()', async () => { it('should dequeue single batch', async () => { - const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - const localBatch = await enqueueAndGenerateBatch(batch) + const tx = '0x1234' + const localBatch = await enqueueAndGenerateBatch(tx) // delete the single appended batch await rollupQueue.dequeueBatch() const batchesLength = await rollupQueue.getBatchesLength() batchesLength.should.equal(1) - const firstBatchHash = (await rollupQueue.batches(0)).batchHeaderHash - firstBatchHash.should.equal( + const { txHash, timestamp } = await rollupQueue.batches(0) + txHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' ) + timestamp.should.equal(0) const front = await rollupQueue.front() front.should.equal(1) }) it('should dequeue many batches', async () => { - const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + const tx = '0x1234' const numBatches = 5 - for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { - await enqueueAndGenerateBatch(batch) + for (let i = 0; i < numBatches; i++) { + await enqueueAndGenerateBatch(tx) } for (let i = 0; i < numBatches; i++) { await rollupQueue.dequeueBatch() const front = await rollupQueue.front() front.should.equal(i + 1) - const ithBatchHash = (await rollupQueue.batches(i)).batchHeaderHash - ithBatchHash.should.equal( + const { txHash, timestamp } = await rollupQueue.batches(i) + txHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' ) + timestamp.should.equal(0) } const batchesLength = await rollupQueue.getBatchesLength() batchesLength.should.equal(numBatches) @@ -156,10 +138,10 @@ describe('RollupQueue', () => { }) it('should throw if dequeueing from a once populated, now empty queue', async () => { - const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + const tx = '0x1234' const numBatches = 3 - for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { - await enqueueAndGenerateBatch(batch) + for (let i = 0; i < numBatches; i++) { + await enqueueAndGenerateBatch(tx) } for (let i = 0; i < numBatches; i++) { await rollupQueue.dequeueBatch() From 8b87ebb6ee3c62f8ca1a4ce9b97b63b7bf41b896 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Wed, 20 May 2020 18:50:28 -0400 Subject: [PATCH 14/37] add main appendL1ToL2TxBatch functionality --- .../contracts/CanonicalTransactionChain.sol | 43 +++++++++---------- .../CanonicalTransactionChain.spec.ts | 23 ++++++++-- 2 files changed, 40 insertions(+), 26 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index f07171200cc1d..a7c2b59e8a11e 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -52,28 +52,28 @@ contract CanonicalTransactionChain { } function appendL1ToL2Batch() public { - // verify header is the next to dequeue for the L1->L2 queue - // bytes32 batchHeaderHash = l1ToL2Queue.hashBatchHeader(_batchHeader); dt.TimestampedHash memory timestampedHash = l1ToL2Queue.getFrontBatch(); - // require(batchHeaderHash == 
timestampedHash.batchHeaderHash, "This batch header is different than the batch header at the front of the L1ToL2TransactionQueue"); - // if (timestamp + sequencerLivenessAssumption > now) { - // require(authenticateAppend(msg.sender), "Message sender does not have permission to append this batch"); - // } - // require(_timestamp > lastOVMTimestamp, "timestamps must monotonically increase"); - // lastOVMTimestamp = _timestamp; + uint timestamp = timestampedHash.timestamp; + bytes32 elementsMerkleRoot = timestampedHash.txHash; + if (timestamp + sequencerLivenessAssumption > now) { + require(authenticateAppend(msg.sender), "Message sender does not have permission to append this batch"); + } + require(timestamp >= latestOVMTimestamp, "Timestamps must be monotonically increasing"); + latestOVMTimestamp = timestamp; // // TODO require proposed timestamp is not too far away from currnt timestamp // // require dist(_timestamp, block.timestamp) < sequencerLivenessAssumption // // calculate batch header - // bytes32 batchHeaderHash = keccak256(abi.encodePacked( - // _timestamp, - // false, // isL1ToL2Tx - // merkleUtils.getMerkleRoot(_txBatch), // elementsMerkleRoot - // _txBatch.length, // numElementsInBatch - // cumulativeNumElements // cumulativeNumElements - // )); - // // store batch header - // batches.push(batchHeaderHash); - // cumulativeElements += _header.numElementsInBlock; + uint numElementsInBatch = 1; + bytes32 batchHeaderHash = keccak256(abi.encodePacked( + timestamp, + true, // isL1ToL2Tx + elementsMerkleRoot, + numElementsInBatch, // numElementsInBatch + cumulativeNumElements // cumulativePrevElements + )); + // store batch header + batches.push(batchHeaderHash); + cumulativeNumElements++; // add a single tx l1ToL2Queue.dequeueBatch(); } @@ -82,12 +82,11 @@ contract CanonicalTransactionChain { //Check that msg.sender is authorized to append require(authenticateAppend(msg.sender), "Message sender does not have permission to append a batch"); require(_txBatch.length > 0, "Cannot submit an empty batch"); - - // TODO - // require(_timestamp > lastOVMTimestamp, "timestamps must monotonically increase"); - // lastOVMTimestamp = _timestamp; + require(_timestamp >= latestOVMTimestamp, "timestamps must monotonically increase"); + latestOVMTimestamp = _timestamp; // require dist(_timestamp, batch.timestamp) < sequencerLivenessAssumption // require(L1ToL2Queue.ageOfOldestQueuedBatch() < sequencerLivenessAssumption, "must process all L1->L2 batches older than liveness assumption before processing L2 batches.") + // TODO check that this timestamp is before that of the oldest slowQueue and l1ToL2Queue batches // calculate batch header bytes32 batchHeaderHash = keccak256(abi.encodePacked( diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index eaf61fa981308..f060c7b55df64 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -178,23 +178,23 @@ describe('CanonicalTransactionChain', () => { describe('appendL1ToL2Batch()', async () => { let l1ToL2Queue const localL1ToL2Queue = [] + const tx = '0x1234' const enqueueAndGenerateBatch = async ( - tx: string + _tx: string ): Promise => { // Submit the rollup batch on-chain const enqueueTx = await l1ToL2Queue .connect(l1ToL2TransactionPasser) - .enqueueTx(tx) + .enqueueTx(_tx) const txReceipt = await 
provider.getTransactionReceipt(enqueueTx.hash) const timestamp = (await provider.getBlock(txReceipt.blockNumber)) .timestamp // Generate a local version of the rollup batch - const localBatch = new RollupQueueBatch(tx, timestamp) + const localBatch = new RollupQueueBatch(_tx, timestamp) await localBatch.generateTree() return localBatch } beforeEach(async () => { - const tx = '0x1234' const l1ToL2QueueAddress = await canonicalTxChain.l1ToL2Queue() l1ToL2Queue = new Contract( l1ToL2QueueAddress, @@ -214,6 +214,21 @@ describe('CanonicalTransactionChain', () => { '0x0000000000000000000000000000000000000000000000000000000000000000' ) }) + it('should successfully append a L1ToL2Batch', async () => { + const { timestamp, txHash } = await l1ToL2Queue.batches(0) + const localBatch = new DefaultRollupBatch( + timestamp, + true, // isL1ToL2Tx + 0, //batchIndex + 0, // cumulativePrevElements + [tx] // elements + ) + await localBatch.generateTree() + const localBatchHeaderHash = await localBatch.hashBatchHeader() + await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() + const batchHeaderHash = await canonicalTxChain.batches(0) + batchHeaderHash.should.equal(localBatchHeaderHash) + }) }) describe('verifyElement() ', async () => { From a09eae3d8536358e34a8e8a8e25470706bb1907c Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Thu, 21 May 2020 14:49:19 -0400 Subject: [PATCH 15/37] added appendTxBatch timestamp functionality --- .../contracts/CanonicalTransactionChain.sol | 31 ++++----- .../contracts/RollupQueue.sol | 11 ++- .../CanonicalTransactionChain.spec.ts | 67 ++++++++----------- 3 files changed, 50 insertions(+), 59 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index a7c2b59e8a11e..5bfa928198fcf 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -7,13 +7,9 @@ import {RollupMerkleUtils} from "./RollupMerkleUtils.sol"; import {L1ToL2TransactionQueue} from "./L1ToL2TransactionQueue.sol"; contract CanonicalTransactionChain { - // The Rollup Merkle Tree library (currently a contract for ease of testing) RollupMerkleUtils merkleUtils; address public sequencer; - - // How many elements in total have been appended uint public cumulativeNumElements; - // List of batch header hashes bytes32[] public batches; uint public latestOVMTimestamp = 0; uint sequencerLivenessAssumption; @@ -22,16 +18,16 @@ contract CanonicalTransactionChain { constructor( address _rollupMerkleUtilsAddress, address _sequencer, - address _l1ToL2TransactionPasserAddress + address _l1ToL2TransactionPasserAddress, + uint _sequencerLivenessAssumption ) public { merkleUtils = RollupMerkleUtils(_rollupMerkleUtilsAddress); sequencer = _sequencer; l1ToL2Queue = new L1ToL2TransactionQueue(_rollupMerkleUtilsAddress, _l1ToL2TransactionPasserAddress, address(this)); - sequencerLivenessAssumption = 100000000000000000000000000; // TODO parameterize this + sequencerLivenessAssumption =_sequencerLivenessAssumption; } - // for testing: returns length of batch list - function getBatchsLength() public view returns (uint) { + function getBatchesLength() public view returns (uint) { return batches.length; } @@ -55,6 +51,7 @@ contract CanonicalTransactionChain { dt.TimestampedHash memory timestampedHash = l1ToL2Queue.getFrontBatch(); uint timestamp = timestampedHash.timestamp; bytes32 elementsMerkleRoot = timestampedHash.txHash; + uint 
numElementsInBatch = 1; if (timestamp + sequencerLivenessAssumption > now) { require(authenticateAppend(msg.sender), "Message sender does not have permission to append this batch"); } @@ -63,7 +60,6 @@ contract CanonicalTransactionChain { // // TODO require proposed timestamp is not too far away from currnt timestamp // // require dist(_timestamp, block.timestamp) < sequencerLivenessAssumption // // calculate batch header - uint numElementsInBatch = 1; bytes32 batchHeaderHash = keccak256(abi.encodePacked( timestamp, true, // isL1ToL2Tx @@ -77,18 +73,17 @@ contract CanonicalTransactionChain { l1ToL2Queue.dequeueBatch(); } - // appends to the current list of batches function appendTransactionBatch(bytes[] memory _txBatch, uint _timestamp) public { - //Check that msg.sender is authorized to append require(authenticateAppend(msg.sender), "Message sender does not have permission to append a batch"); require(_txBatch.length > 0, "Cannot submit an empty batch"); - require(_timestamp >= latestOVMTimestamp, "timestamps must monotonically increase"); + require(_timestamp >= latestOVMTimestamp, "Timestamps must monotonically increase"); latestOVMTimestamp = _timestamp; - // require dist(_timestamp, batch.timestamp) < sequencerLivenessAssumption - // require(L1ToL2Queue.ageOfOldestQueuedBatch() < sequencerLivenessAssumption, "must process all L1->L2 batches older than liveness assumption before processing L2 batches.") - // TODO check that this timestamp is before that of the oldest slowQueue and l1ToL2Queue batches - - // calculate batch header + require(_timestamp + sequencerLivenessAssumption > now, "cannot submit a batch with a timestamp older than the sequencer liveness assumption."); + require(_timestamp <= now, "cannot submit a batch with a timestamp in the future"); + if(!l1ToL2Queue.isEmpty()) { + require(l1ToL2Queue.ageOfOldestQueuedBatch() < sequencerLivenessAssumption, "must process all L1->L2 batches older than liveness assumption before processing L2 batches."); + require(_timestamp <= l1ToL2Queue.ageOfOldestQueuedBatch(), "Must process older queued batches first to enforce timestamp monotonicity"); + } bytes32 batchHeaderHash = keccak256(abi.encodePacked( _timestamp, false, // isL1ToL2Tx @@ -96,9 +91,7 @@ contract CanonicalTransactionChain { _txBatch.length, // numElementsInBatch cumulativeNumElements // cumulativeNumElements )); - // store batch header batches.push(batchHeaderHash); - // update cumulative elements cumulativeNumElements += _txBatch.length; } diff --git a/packages/rollup-contracts/contracts/RollupQueue.sol b/packages/rollup-contracts/contracts/RollupQueue.sol index bf8ee3a4fa039..d66f650098e4d 100644 --- a/packages/rollup-contracts/contracts/RollupQueue.sol +++ b/packages/rollup-contracts/contracts/RollupQueue.sol @@ -25,11 +25,20 @@ contract RollupQueue { return batches.length; } + function isEmpty() public view returns (bool) { + return front >= batches.length; + } + function getFrontBatch() public view returns (dt.TimestampedHash memory) { - require(front < batches.length, "Cannot get front batch from an empty queue"); + require(!isEmpty(), "Cannot get front batch from an empty queue"); return batches[front]; } + function ageOfOldestQueuedBatch() public view returns (uint) { + dt.TimestampedHash memory frontBatch = getFrontBatch(); + return frontBatch.timestamp; + } + function authenticateEnqueue(address _sender) public view returns (bool) { return true; } function authenticateDequeue(address _sender) public view returns (bool) { return true; } diff --git 
a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index f060c7b55df64..cba4a04416439 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -44,23 +44,27 @@ describe('CanonicalTransactionChain', () => { rollupMerkleUtils.address, sequencer.address, l1ToL2TransactionPasser.address, + 600, //600 seconds = 10 min ], { gasLimit: 6700000, } ) }) - + const appendBatch = async (batch: string[]): Promise => { + const timestamp = Math.floor(Date.now() / 1000) + // Submit the rollup batch on-chain + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(batch, timestamp) + return timestamp + } const appendAndGenerateBatch = async ( batch: string[], - timestamp: number, batchIndex: number, cumulativePrevElements: number ): Promise => { - // Submit the rollup batch on-chain - await canonicalTxChain - .connect(sequencer) - .appendTransactionBatch(batch, timestamp) + const timestamp = await appendBatch(batch) // Generate a local version of the rollup batch const localBatch = new DefaultRollupBatch( timestamp, @@ -79,52 +83,37 @@ describe('CanonicalTransactionChain', () => { describe('appendTransactionBatch()', async () => { it('should not throw as long as it gets a bytes array (even if its invalid)', async () => { const batch = ['0x1234', '0x1234'] - const timestamp = 0 - await canonicalTxChain - .connect(sequencer) - .appendTransactionBatch(batch, timestamp) // Did not throw... success! + await appendBatch(batch) }) it('should throw if submitting an empty batch', async () => { const emptyBatch = [] - const timestamp = 0 - await canonicalTxChain - .connect(sequencer) - .appendTransactionBatch(emptyBatch, timestamp) - .should.be.revertedWith( - 'VM Exception while processing transaction: revert Cannot submit an empty batch' - ) + await appendBatch(emptyBatch).should.be.revertedWith( + 'VM Exception while processing transaction: revert Cannot submit an empty batch' + ) }) it('should add to batches array', async () => { const batch = ['0x1234', '0x6578'] - const timestamp = 0 - const output = await canonicalTxChain - .connect(sequencer) - .appendTransactionBatch(batch, timestamp) - const batchesLength = await canonicalTxChain.getBatchsLength() + await appendBatch(batch) + const batchesLength = await canonicalTxChain.getBatchesLength() batchesLength.toNumber().should.equal(1) }) it('should update cumulativeNumElements correctly', async () => { const batch = ['0x1234', '0x5678'] - const timestamp = 0 - await canonicalTxChain - .connect(sequencer) - .appendTransactionBatch(batch, timestamp) + await appendBatch(batch) const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() cumulativeNumElements.toNumber().should.equal(2) }) it('should allow appendTransactionBatch from sequencer', async () => { const batch = ['0x1234', '0x6578'] - const timestamp = 0 - await canonicalTxChain - .connect(sequencer) - .appendTransactionBatch(batch, timestamp) // Did not throw... success! 
+ await appendBatch(batch) }) it('should not allow appendTransactionBatch from other address', async () => { const batch = ['0x1234', '0x6578'] - const timestamp = 0 + const timestamp = Math.floor(Date.now() / 1000) + // Submit the rollup batch on-chain await canonicalTxChain .appendTransactionBatch(batch, timestamp) .should.be.revertedWith( @@ -135,10 +124,8 @@ describe('CanonicalTransactionChain', () => { const batch = ['0x1234', '0x5678'] const batchIndex = 0 const cumulativePrevElements = 0 - const timestamp = 0 const localBatch = await appendAndGenerateBatch( batch, - timestamp, batchIndex, cumulativePrevElements ) @@ -151,11 +138,9 @@ describe('CanonicalTransactionChain', () => { const batch = ['0x1234', '0x5678'] const numBatchs = 10 for (let batchIndex = 0; batchIndex < numBatchs; batchIndex++) { - const timestamp = batchIndex const cumulativePrevElements = batch.length * batchIndex const localBatch = await appendAndGenerateBatch( batch, - timestamp, batchIndex, cumulativePrevElements ) @@ -170,7 +155,7 @@ describe('CanonicalTransactionChain', () => { const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() cumulativeNumElements.toNumber().should.equal(numBatchs * batch.length) //check batches length - const batchesLength = await canonicalTxChain.getBatchsLength() + const batchesLength = await canonicalTxChain.getBatchesLength() batchesLength.toNumber().should.equal(numBatchs) }) }) @@ -229,6 +214,13 @@ describe('CanonicalTransactionChain', () => { const batchHeaderHash = await canonicalTxChain.batches(0) batchHeaderHash.should.equal(localBatchHeaderHash) }) + it('should now allow non-sequencer to appendL1ToL2Batch if less than 10 minutes old', async () => { + await canonicalTxChain + .appendL1ToL2Batch() + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Message sender does not have permission to append this batch' + ) + }) }) describe('verifyElement() ', async () => { @@ -245,7 +237,6 @@ describe('CanonicalTransactionChain', () => { const cumulativePrevElements = batch.length * batchIndex const localBatch = await appendAndGenerateBatch( batch, - timestamp, batchIndex, cumulativePrevElements ) @@ -272,7 +263,6 @@ describe('CanonicalTransactionChain', () => { const timestamp = 0 const localBatch = await appendAndGenerateBatch( batch, - timestamp, batchIndex, cumulativePrevElements ) @@ -299,7 +289,6 @@ describe('CanonicalTransactionChain', () => { const timestamp = 0 const localBatch = await appendAndGenerateBatch( batch, - timestamp, batchIndex, cumulativePrevElements ) From 43cd77132f3ba4e4e123fcdb10601e36857d8f4c Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Thu, 21 May 2020 16:14:44 -0400 Subject: [PATCH 16/37] clean up comments --- .../contracts/CanonicalTransactionChain.sol | 20 ++++++++----------- .../rollup-contracts/contracts/DataTypes.sol | 5 ----- .../test/rollup-list/RLhelper.ts | 7 +------ 3 files changed, 9 insertions(+), 23 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index 5bfa928198fcf..248b28adb8110 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -57,33 +57,29 @@ contract CanonicalTransactionChain { } require(timestamp >= latestOVMTimestamp, "Timestamps must be monotonically increasing"); latestOVMTimestamp = timestamp; - // // TODO require proposed timestamp is not too far away from currnt 
timestamp - // // require dist(_timestamp, block.timestamp) < sequencerLivenessAssumption - // // calculate batch header bytes32 batchHeaderHash = keccak256(abi.encodePacked( timestamp, true, // isL1ToL2Tx - elementsMerkleRoot, - numElementsInBatch, // numElementsInBatch + elementsMerkleRoot, + numElementsInBatch, cumulativeNumElements // cumulativePrevElements )); - // store batch header batches.push(batchHeaderHash); - cumulativeNumElements++; // add a single tx + cumulativeNumElements += numElementsInBatch; // add a single tx l1ToL2Queue.dequeueBatch(); } function appendTransactionBatch(bytes[] memory _txBatch, uint _timestamp) public { require(authenticateAppend(msg.sender), "Message sender does not have permission to append a batch"); require(_txBatch.length > 0, "Cannot submit an empty batch"); - require(_timestamp >= latestOVMTimestamp, "Timestamps must monotonically increase"); - latestOVMTimestamp = _timestamp; - require(_timestamp + sequencerLivenessAssumption > now, "cannot submit a batch with a timestamp older than the sequencer liveness assumption."); - require(_timestamp <= now, "cannot submit a batch with a timestamp in the future"); + require(_timestamp + sequencerLivenessAssumption > now, "Cannot submit a batch with a timestamp older than the sequencer liveness assumption."); + require(_timestamp <= now, "Cannot submit a batch with a timestamp in the future"); if(!l1ToL2Queue.isEmpty()) { - require(l1ToL2Queue.ageOfOldestQueuedBatch() < sequencerLivenessAssumption, "must process all L1->L2 batches older than liveness assumption before processing L2 batches."); require(_timestamp <= l1ToL2Queue.ageOfOldestQueuedBatch(), "Must process older queued batches first to enforce timestamp monotonicity"); + require(l1ToL2Queue.ageOfOldestQueuedBatch() < sequencerLivenessAssumption, "must process all L1->L2 batches older than liveness assumption before processing L2 batches."); } + require(_timestamp >= latestOVMTimestamp, "Timestamps must monotonically increase"); + latestOVMTimestamp = _timestamp; bytes32 batchHeaderHash = keccak256(abi.encodePacked( _timestamp, false, // isL1ToL2Tx diff --git a/packages/rollup-contracts/contracts/DataTypes.sol b/packages/rollup-contracts/contracts/DataTypes.sol index e3245408570e0..2d8455c0fa55e 100644 --- a/packages/rollup-contracts/contracts/DataTypes.sol +++ b/packages/rollup-contracts/contracts/DataTypes.sol @@ -26,11 +26,6 @@ contract DataTypes { uint cumulativePrevElements; } - struct TxQueueBatchHeader { - bytes32 elementsMerkleRoot; - uint numElementsInBatch; - } - struct TimestampedHash { uint timestamp; bytes32 txHash; diff --git a/packages/rollup-contracts/test/rollup-list/RLhelper.ts b/packages/rollup-contracts/test/rollup-list/RLhelper.ts index b86e860b1ecd2..76c5fb2d2d0e8 100644 --- a/packages/rollup-contracts/test/rollup-list/RLhelper.ts +++ b/packages/rollup-contracts/test/rollup-list/RLhelper.ts @@ -25,14 +25,9 @@ interface ElementInclusionProof { siblings: string[] } -interface TxQueueBatchHeader { - elementsMerkleRoot: string - numElementsInBatch: number -} - /* * Helper class which provides all information requried for a particular - * Rollup batch. This includes all of the tranisitions in readable form + * Rollup batch. This includes all of the transactions in readable form * as well as the merkle tree which it generates. 
*/ export class DefaultRollupBatch { From 55e7583a9a999a85493ae1fda3ae11fb26ff1395 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Thu, 21 May 2020 16:19:57 -0400 Subject: [PATCH 17/37] getFrontBatch -> peek --- .../contracts/CanonicalTransactionChain.sol | 6 +++--- packages/rollup-contracts/contracts/RollupQueue.sol | 8 ++++---- .../test/rollup-list/CanonicalTransactionChain.spec.ts | 2 +- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index 248b28adb8110..40b54662f1ba2 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -48,7 +48,7 @@ contract CanonicalTransactionChain { } function appendL1ToL2Batch() public { - dt.TimestampedHash memory timestampedHash = l1ToL2Queue.getFrontBatch(); + dt.TimestampedHash memory timestampedHash = l1ToL2Queue.peek(); uint timestamp = timestampedHash.timestamp; bytes32 elementsMerkleRoot = timestampedHash.txHash; uint numElementsInBatch = 1; @@ -75,8 +75,8 @@ contract CanonicalTransactionChain { require(_timestamp + sequencerLivenessAssumption > now, "Cannot submit a batch with a timestamp older than the sequencer liveness assumption."); require(_timestamp <= now, "Cannot submit a batch with a timestamp in the future"); if(!l1ToL2Queue.isEmpty()) { - require(_timestamp <= l1ToL2Queue.ageOfOldestQueuedBatch(), "Must process older queued batches first to enforce timestamp monotonicity"); - require(l1ToL2Queue.ageOfOldestQueuedBatch() < sequencerLivenessAssumption, "must process all L1->L2 batches older than liveness assumption before processing L2 batches."); + require(_timestamp <= l1ToL2Queue.peekTimestamp(), "Must process older queued batches first to enforce timestamp monotonicity"); + require(l1ToL2Queue.peekTimestamp() < sequencerLivenessAssumption, "must process all L1->L2 batches older than liveness assumption before processing L2 batches."); } require(_timestamp >= latestOVMTimestamp, "Timestamps must monotonically increase"); latestOVMTimestamp = _timestamp; diff --git a/packages/rollup-contracts/contracts/RollupQueue.sol b/packages/rollup-contracts/contracts/RollupQueue.sol index d66f650098e4d..0b56f3aed5783 100644 --- a/packages/rollup-contracts/contracts/RollupQueue.sol +++ b/packages/rollup-contracts/contracts/RollupQueue.sol @@ -29,13 +29,13 @@ contract RollupQueue { return front >= batches.length; } - function getFrontBatch() public view returns (dt.TimestampedHash memory) { - require(!isEmpty(), "Cannot get front batch from an empty queue"); + function peek() public view returns (dt.TimestampedHash memory) { + require(!isEmpty(), "Queue is empty, no element to peek at"); return batches[front]; } - function ageOfOldestQueuedBatch() public view returns (uint) { - dt.TimestampedHash memory frontBatch = getFrontBatch(); + function peekTimestamp() public view returns (uint) { + dt.TimestampedHash memory frontBatch = peek(); return frontBatch.timestamp; } diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index cba4a04416439..6c82cb5374f01 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -110,7 +110,7 @@ describe('CanonicalTransactionChain', () => { const batch = 
['0x1234', '0x6578'] await appendBatch(batch) }) - it('should not allow appendTransactionBatch from other address', async () => { + it('should not allow appendTransactionBatch from non-sequencer', async () => { const batch = ['0x1234', '0x6578'] const timestamp = Math.floor(Date.now() / 1000) // Submit the rollup batch on-chain From 72f8422564f7433779f1bbc7af342a96610d2c42 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Thu, 21 May 2020 17:16:31 -0400 Subject: [PATCH 18/37] add tests for appendTxBatch timestamp protection --- .../contracts/CanonicalTransactionChain.sol | 4 +- .../CanonicalTransactionChain.spec.ts | 53 ++++++++++++++++--- .../test/rollup-list/RollupQueue.spec.ts | 28 ++++++++++ 3 files changed, 76 insertions(+), 9 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index 40b54662f1ba2..728dadac05e41 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -72,11 +72,11 @@ contract CanonicalTransactionChain { function appendTransactionBatch(bytes[] memory _txBatch, uint _timestamp) public { require(authenticateAppend(msg.sender), "Message sender does not have permission to append a batch"); require(_txBatch.length > 0, "Cannot submit an empty batch"); - require(_timestamp + sequencerLivenessAssumption > now, "Cannot submit a batch with a timestamp older than the sequencer liveness assumption."); + require(_timestamp + sequencerLivenessAssumption > now, "Cannot submit a batch with a timestamp older than the sequencer liveness assumption"); require(_timestamp <= now, "Cannot submit a batch with a timestamp in the future"); if(!l1ToL2Queue.isEmpty()) { require(_timestamp <= l1ToL2Queue.peekTimestamp(), "Must process older queued batches first to enforce timestamp monotonicity"); - require(l1ToL2Queue.peekTimestamp() < sequencerLivenessAssumption, "must process all L1->L2 batches older than liveness assumption before processing L2 batches."); + require(l1ToL2Queue.peekTimestamp() < sequencerLivenessAssumption, "must process all L1->L2 batches older than liveness assumption before processing L2 batches"); } require(_timestamp >= latestOVMTimestamp, "Timestamps must monotonically increase"); latestOVMTimestamp = _timestamp; diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index 6c82cb5374f01..4c6bcbc891284 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -81,18 +81,61 @@ describe('CanonicalTransactionChain', () => { * Test appendTransactionBatch() */ describe('appendTransactionBatch()', async () => { - it('should not throw as long as it gets a bytes array (even if its invalid)', async () => { + it('should not throw when appending a batch from the sequencer', async () => { const batch = ['0x1234', '0x1234'] await appendBatch(batch) }) - it('should throw if submitting an empty batch', async () => { const emptyBatch = [] await appendBatch(emptyBatch).should.be.revertedWith( 'VM Exception while processing transaction: revert Cannot submit an empty batch' ) }) - + it('should rever if submitting a 10 minute old batch', async () => { + const batch = ['0x1234', '0x1234'] + const timestamp = Math.floor(Date.now() / 1000) + const oldTimestamp = 
timestamp - 601 + // Submit the rollup batch on-chain + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(batch, oldTimestamp) + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Cannot submit a batch with a timestamp older than the sequencer liveness assumption' + ) + }) + it('should not revert if submitting a 5 minute old batch', async () => { + const batch = ['0x1234', '0x1234'] + const timestamp = Math.floor(Date.now() / 1000) + const oldTimestamp = timestamp - 300 + // Submit the rollup batch on-chain + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(batch, oldTimestamp) + }) + it('should revert if submitting a batch with a future timestamp', async () => { + const batch = ['0x1234', '0x1234'] + const timestamp = Math.floor(Date.now() / 1000) + const futureTimestamp = timestamp + 100 + // Submit the rollup batch on-chain + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(batch, futureTimestamp) + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Cannot submit a batch with a timestamp in the future' + ) + }) + it('should revert if submitting a new batch with a timestamp less than latest batch timestamp', async () => { + const batch = ['0x1234', '0x1234'] + const timestamp = await appendBatch(batch) + const oldTimestamp = timestamp - 1 + // Submit the rollup batch on-chain + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(batch, oldTimestamp) + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Timestamps must monotonically increase' + ) + }) it('should add to batches array', async () => { const batch = ['0x1234', '0x6578'] await appendBatch(batch) @@ -106,10 +149,6 @@ describe('CanonicalTransactionChain', () => { const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() cumulativeNumElements.toNumber().should.equal(2) }) - it('should allow appendTransactionBatch from sequencer', async () => { - const batch = ['0x1234', '0x6578'] - await appendBatch(batch) - }) it('should not allow appendTransactionBatch from non-sequencer', async () => { const batch = ['0x1234', '0x6578'] const timestamp = Math.floor(Date.now() / 1000) diff --git a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts index e4ec1a653db71..4f8ef157073af 100644 --- a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts @@ -107,6 +107,8 @@ describe('RollupQueue', () => { timestamp.should.equal(0) const front = await rollupQueue.front() front.should.equal(1) + const isEmpty = await rollupQueue.isEmpty() + isEmpty.should.equal(true) }) it('should dequeue many batches', async () => { @@ -127,6 +129,8 @@ describe('RollupQueue', () => { } const batchesLength = await rollupQueue.getBatchesLength() batchesLength.should.equal(numBatches) + const isEmpty = await rollupQueue.isEmpty() + isEmpty.should.equal(true) }) it('should throw if dequeueing from empty queue', async () => { @@ -153,4 +157,28 @@ describe('RollupQueue', () => { ) }) }) + describe('peek() and peekTimestamp()', async () => { + it('should peek successfully with single element', async () => { + const tx = '0x1234' + const localBatch = await enqueueAndGenerateBatch(tx) + const { txHash, timestamp } = await rollupQueue.peek() + const peekTimestamp = await rollupQueue.peekTimestamp() + const expectedBatchHeaderHash = await 
localBatch.getMerkleRoot() + txHash.should.equal(expectedBatchHeaderHash) + peekTimestamp.should.equal(timestamp) + timestamp.should.equal(localBatch.timestamp) + }) + it('should revert when peeking at empty queue', async () => { + await rollupQueue + .peek() + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Queue is empty, no element to peek at' + ) + await rollupQueue + .peekTimestamp() + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Queue is empty, no element to peek at' + ) + }) + }) }) From d92c53c1a2016945bb85b09e63a2dce6660d5e59 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Thu, 21 May 2020 18:40:18 -0400 Subject: [PATCH 19/37] add timestamp unit tests --- .../contracts/CanonicalTransactionChain.sol | 3 +- .../CanonicalTransactionChain.spec.ts | 220 +++++++++++------- 2 files changed, 140 insertions(+), 83 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index 728dadac05e41..fd72c6ee35773 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -55,7 +55,7 @@ contract CanonicalTransactionChain { if (timestamp + sequencerLivenessAssumption > now) { require(authenticateAppend(msg.sender), "Message sender does not have permission to append this batch"); } - require(timestamp >= latestOVMTimestamp, "Timestamps must be monotonically increasing"); + // require(timestamp >= latestOVMTimestamp, "Timestamps must be monotonically increasing"); latestOVMTimestamp = timestamp; bytes32 batchHeaderHash = keccak256(abi.encodePacked( timestamp, @@ -76,7 +76,6 @@ contract CanonicalTransactionChain { require(_timestamp <= now, "Cannot submit a batch with a timestamp in the future"); if(!l1ToL2Queue.isEmpty()) { require(_timestamp <= l1ToL2Queue.peekTimestamp(), "Must process older queued batches first to enforce timestamp monotonicity"); - require(l1ToL2Queue.peekTimestamp() < sequencerLivenessAssumption, "must process all L1->L2 batches older than liveness assumption before processing L2 batches"); } require(_timestamp >= latestOVMTimestamp, "Timestamps must monotonically increase"); latestOVMTimestamp = _timestamp; diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index 4c6bcbc891284..4b0a7dcd02f3a 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -27,30 +27,10 @@ describe('CanonicalTransactionChain', () => { ] = getWallets(provider) let canonicalTxChain let rollupMerkleUtils + let l1ToL2Queue + const localL1ToL2Queue = [] + const LIVENESS_ASSUMPTION = 600 //600 seconds = 10 minutes - /* Link libraries before tests */ - before(async () => { - rollupMerkleUtils = await deployContract(wallet, RollupMerkleUtils, [], { - gasLimit: 6700000, - }) - }) - - /* Deploy a new RollupChain before each test */ - beforeEach(async () => { - canonicalTxChain = await deployContract( - wallet, - CanonicalTransactionChain, - [ - rollupMerkleUtils.address, - sequencer.address, - l1ToL2TransactionPasser.address, - 600, //600 seconds = 10 min - ], - { - gasLimit: 6700000, - } - ) - }) const appendBatch = async (batch: string[]): Promise => { const timestamp = Math.floor(Date.now() / 1000) // Submit the rollup batch 
on-chain @@ -59,6 +39,7 @@ describe('CanonicalTransactionChain', () => { .appendTransactionBatch(batch, timestamp) return timestamp } + const appendAndGenerateBatch = async ( batch: string[], batchIndex: number, @@ -77,9 +58,45 @@ describe('CanonicalTransactionChain', () => { return localBatch } - /* - * Test appendTransactionBatch() - */ + const enqueueAndGenerateBatch = async ( + _tx: string + ): Promise => { + // Submit the rollup batch on-chain + const enqueueTx = await l1ToL2Queue + .connect(l1ToL2TransactionPasser) + .enqueueTx(_tx) + const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) + const timestamp = (await provider.getBlock(txReceipt.blockNumber)).timestamp + // Generate a local version of the rollup batch + const localBatch = new RollupQueueBatch(_tx, timestamp) + await localBatch.generateTree() + return localBatch + } + + /* Link libraries before tests */ + before(async () => { + rollupMerkleUtils = await deployContract(wallet, RollupMerkleUtils, [], { + gasLimit: 6700000, + }) + }) + + /* Deploy a new RollupChain before each test */ + beforeEach(async () => { + canonicalTxChain = await deployContract( + wallet, + CanonicalTransactionChain, + [ + rollupMerkleUtils.address, + sequencer.address, + l1ToL2TransactionPasser.address, + LIVENESS_ASSUMPTION, + ], + { + gasLimit: 6700000, + } + ) + }) + describe('appendTransactionBatch()', async () => { it('should not throw when appending a batch from the sequencer', async () => { const batch = ['0x1234', '0x1234'] @@ -94,7 +111,7 @@ describe('CanonicalTransactionChain', () => { it('should rever if submitting a 10 minute old batch', async () => { const batch = ['0x1234', '0x1234'] const timestamp = Math.floor(Date.now() / 1000) - const oldTimestamp = timestamp - 601 + const oldTimestamp = timestamp - (LIVENESS_ASSUMPTION + 1) // Submit the rollup batch on-chain await canonicalTxChain .connect(sequencer) @@ -106,7 +123,7 @@ describe('CanonicalTransactionChain', () => { it('should not revert if submitting a 5 minute old batch', async () => { const batch = ['0x1234', '0x1234'] const timestamp = Math.floor(Date.now() / 1000) - const oldTimestamp = timestamp - 300 + const oldTimestamp = timestamp - (LIVENESS_ASSUMPTION / 2) // Submit the rollup batch on-chain await canonicalTxChain .connect(sequencer) @@ -197,67 +214,108 @@ describe('CanonicalTransactionChain', () => { const batchesLength = await canonicalTxChain.getBatchesLength() batchesLength.toNumber().should.equal(numBatchs) }) + describe('when the l1ToL2Queue is not empty', async () => { + let localBatch + beforeEach(async () => { + const tx = '0x1234' + const l1ToL2QueueAddress = await canonicalTxChain.l1ToL2Queue() + l1ToL2Queue = new Contract( + l1ToL2QueueAddress, + L1ToL2TransactionQueue.abi, + provider + ) + localBatch = await enqueueAndGenerateBatch(tx) + }) + it('should succesfully append a batch with an older timestamp', async () => { + const batch = ['0x1234', '0x6578'] + const oldTimestamp = localBatch.timestamp - 1 + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(batch, oldTimestamp) + }) + it('should succesfully append a batch with an equal timestamp', async () => { + const batch = ['0x1234', '0x6578'] + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(batch, localBatch.timestamp) + }) + it('should revert when appending a block with a newer timestamp', async () => { + const batch = ['0x1234', '0x6578'] + const newTimestamp = localBatch.timestamp + 1 + await canonicalTxChain + .connect(sequencer) + 
.appendTransactionBatch(batch, newTimestamp) + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Cannot submit a batch with a timestamp in the future' + ) + }) + }) }) describe('appendL1ToL2Batch()', async () => { - let l1ToL2Queue - const localL1ToL2Queue = [] const tx = '0x1234' - const enqueueAndGenerateBatch = async ( - _tx: string - ): Promise => { - // Submit the rollup batch on-chain - const enqueueTx = await l1ToL2Queue - .connect(l1ToL2TransactionPasser) - .enqueueTx(_tx) - const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) - const timestamp = (await provider.getBlock(txReceipt.blockNumber)) - .timestamp - // Generate a local version of the rollup batch - const localBatch = new RollupQueueBatch(_tx, timestamp) - await localBatch.generateTree() - return localBatch - } - beforeEach(async () => { - const l1ToL2QueueAddress = await canonicalTxChain.l1ToL2Queue() - l1ToL2Queue = new Contract( - l1ToL2QueueAddress, - L1ToL2TransactionQueue.abi, - provider - ) - const localBatch = await enqueueAndGenerateBatch(tx) - localL1ToL2Queue.push(localBatch) - }) - it('should successfully dequeue a L1ToL2Batch', async () => { - await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() - const front = await l1ToL2Queue.front() - front.should.equal(1) - const { timestamp, txHash } = await l1ToL2Queue.batches(0) - timestamp.should.equal(0) - txHash.should.equal( - '0x0000000000000000000000000000000000000000000000000000000000000000' - ) - }) - it('should successfully append a L1ToL2Batch', async () => { - const { timestamp, txHash } = await l1ToL2Queue.batches(0) - const localBatch = new DefaultRollupBatch( - timestamp, - true, // isL1ToL2Tx - 0, //batchIndex - 0, // cumulativePrevElements - [tx] // elements - ) - await localBatch.generateTree() - const localBatchHeaderHash = await localBatch.hashBatchHeader() - await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() - const batchHeaderHash = await canonicalTxChain.batches(0) - batchHeaderHash.should.equal(localBatchHeaderHash) + describe('when there is a batch in the L1toL2Queue', async () => { + beforeEach(async () => { + const l1ToL2QueueAddress = await canonicalTxChain.l1ToL2Queue() + l1ToL2Queue = new Contract( + l1ToL2QueueAddress, + L1ToL2TransactionQueue.abi, + provider + ) + const localBatch = await enqueueAndGenerateBatch(tx) + localL1ToL2Queue.push(localBatch) + }) + it('should successfully dequeue a L1ToL2Batch', async () => { + await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() + const front = await l1ToL2Queue.front() + front.should.equal(1) + const { timestamp, txHash } = await l1ToL2Queue.batches(0) + timestamp.should.equal(0) + txHash.should.equal( + '0x0000000000000000000000000000000000000000000000000000000000000000' + ) + }) + it('should successfully append a L1ToL2Batch', async () => { + const { timestamp, txHash } = await l1ToL2Queue.batches(0) + const localBatch = new DefaultRollupBatch( + timestamp, + true, // isL1ToL2Tx + 0, //batchIndex + 0, // cumulativePrevElements + [tx] // elements + ) + await localBatch.generateTree() + const localBatchHeaderHash = await localBatch.hashBatchHeader() + await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() + const batchHeaderHash = await canonicalTxChain.batches(0) + batchHeaderHash.should.equal(localBatchHeaderHash) + }) + it('should not allow non-sequencer to appendL1ToL2Batch if less than 10 minutes old', async () => { + await canonicalTxChain + .appendL1ToL2Batch() + .should.be.revertedWith( + 'VM Exception 
while processing transaction: revert Message sender does not have permission to append this batch' + ) + }) + describe('after 10 minutes have elapsed', async () => { + let snapshotID + beforeEach(async () => { + snapshotID = await provider.send('evm_snapshot', []) + await provider.send('evm_increaseTime', [LIVENESS_ASSUMPTION]) + }) + afterEach(async () => { + await provider.send('evm_revert', [snapshotID]) + }) + it('should allow non-sequencer to appendL1ToL2Batch', async () => { + await canonicalTxChain.appendL1ToL2Batch() + }) + }) }) - it('should now allow non-sequencer to appendL1ToL2Batch if less than 10 minutes old', async () => { + it('should revert when L1ToL2TxQueue is empty', async () => { await canonicalTxChain .appendL1ToL2Batch() .should.be.revertedWith( - 'VM Exception while processing transaction: revert Message sender does not have permission to append this batch' + 'VM Exception while processing transaction: revert Queue is empty, no element to peek at' ) }) }) From bee8426669701bd45f85ec9772f855a150266948 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Thu, 21 May 2020 21:30:53 -0400 Subject: [PATCH 20/37] clean up tests --- .../contracts/CanonicalTransactionChain.sol | 23 +++--- .../contracts/RollupQueue.sol | 3 +- .../CanonicalTransactionChain.spec.ts | 2 +- .../test/rollup-list/RLhelper.ts | 5 +- .../test/rollup-list/RollupQueue.spec.ts | 82 +++++++++---------- 5 files changed, 54 insertions(+), 61 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index fd72c6ee35773..0d4404706ee94 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -7,13 +7,13 @@ import {RollupMerkleUtils} from "./RollupMerkleUtils.sol"; import {L1ToL2TransactionQueue} from "./L1ToL2TransactionQueue.sol"; contract CanonicalTransactionChain { - RollupMerkleUtils merkleUtils; address public sequencer; + uint public sequencerLivenessAssumption; + RollupMerkleUtils public merkleUtils; + L1ToL2TransactionQueue public l1ToL2Queue; uint public cumulativeNumElements; bytes32[] public batches; - uint public latestOVMTimestamp = 0; - uint sequencerLivenessAssumption; - L1ToL2TransactionQueue public l1ToL2Queue; + uint public lastOVMTimestamp; constructor( address _rollupMerkleUtilsAddress, @@ -25,6 +25,7 @@ contract CanonicalTransactionChain { sequencer = _sequencer; l1ToL2Queue = new L1ToL2TransactionQueue(_rollupMerkleUtilsAddress, _l1ToL2TransactionPasserAddress, address(this)); sequencerLivenessAssumption =_sequencerLivenessAssumption; + lastOVMTimestamp = 0; } function getBatchesLength() public view returns (uint) { @@ -50,13 +51,12 @@ contract CanonicalTransactionChain { function appendL1ToL2Batch() public { dt.TimestampedHash memory timestampedHash = l1ToL2Queue.peek(); uint timestamp = timestampedHash.timestamp; - bytes32 elementsMerkleRoot = timestampedHash.txHash; - uint numElementsInBatch = 1; if (timestamp + sequencerLivenessAssumption > now) { require(authenticateAppend(msg.sender), "Message sender does not have permission to append this batch"); } - // require(timestamp >= latestOVMTimestamp, "Timestamps must be monotonically increasing"); - latestOVMTimestamp = timestamp; + lastOVMTimestamp = timestamp; + bytes32 elementsMerkleRoot = timestampedHash.txHash; + uint numElementsInBatch = 1; bytes32 batchHeaderHash = keccak256(abi.encodePacked( timestamp, true, // isL1ToL2Tx @@ -65,7 +65,7 @@ 
contract CanonicalTransactionChain { cumulativeNumElements // cumulativePrevElements )); batches.push(batchHeaderHash); - cumulativeNumElements += numElementsInBatch; // add a single tx + cumulativeNumElements += numElementsInBatch; l1ToL2Queue.dequeueBatch(); } @@ -77,8 +77,8 @@ contract CanonicalTransactionChain { if(!l1ToL2Queue.isEmpty()) { require(_timestamp <= l1ToL2Queue.peekTimestamp(), "Must process older queued batches first to enforce timestamp monotonicity"); } - require(_timestamp >= latestOVMTimestamp, "Timestamps must monotonically increase"); - latestOVMTimestamp = _timestamp; + require(_timestamp >= lastOVMTimestamp, "Timestamps must monotonically increase"); + lastOVMTimestamp = _timestamp; bytes32 batchHeaderHash = keccak256(abi.encodePacked( _timestamp, false, // isL1ToL2Tx @@ -102,7 +102,6 @@ contract CanonicalTransactionChain { if(_position != _inclusionProof.indexInBatch + batchHeader.cumulativePrevElements) return false; - // verify elementsMerkleRoot if (!merkleUtils.verify( batchHeader.elementsMerkleRoot, diff --git a/packages/rollup-contracts/contracts/RollupQueue.sol b/packages/rollup-contracts/contracts/RollupQueue.sol index 0b56f3aed5783..1460385881dd8 100644 --- a/packages/rollup-contracts/contracts/RollupQueue.sol +++ b/packages/rollup-contracts/contracts/RollupQueue.sol @@ -20,7 +20,7 @@ contract RollupQueue { merkleUtils = RollupMerkleUtils(_rollupMerkleUtilsAddress); front = 0; } - // for testing: returns length of batch list + function getBatchesLength() public view returns (uint) { return batches.length; } @@ -42,7 +42,6 @@ contract RollupQueue { function authenticateEnqueue(address _sender) public view returns (bool) { return true; } function authenticateDequeue(address _sender) public view returns (bool) { return true; } - // enqueues to the end of the current queue of batches function enqueueTx(bytes memory _tx) public { require(authenticateEnqueue(msg.sender), "Message sender does not have permission to enqueue"); dt.TimestampedHash memory timestampedHash = dt.TimestampedHash( diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index 4b0a7dcd02f3a..2c9d45f972e2f 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -123,7 +123,7 @@ describe('CanonicalTransactionChain', () => { it('should not revert if submitting a 5 minute old batch', async () => { const batch = ['0x1234', '0x1234'] const timestamp = Math.floor(Date.now() / 1000) - const oldTimestamp = timestamp - (LIVENESS_ASSUMPTION / 2) + const oldTimestamp = timestamp - LIVENESS_ASSUMPTION / 2 // Submit the rollup batch on-chain await canonicalTxChain .connect(sequencer) diff --git a/packages/rollup-contracts/test/rollup-list/RLhelper.ts b/packages/rollup-contracts/test/rollup-list/RLhelper.ts index 76c5fb2d2d0e8..68a81476ffc56 100644 --- a/packages/rollup-contracts/test/rollup-list/RLhelper.ts +++ b/packages/rollup-contracts/test/rollup-list/RLhelper.ts @@ -132,11 +132,11 @@ export class DefaultRollupBatch { } /* * Helper class which provides all information requried for a particular - * Rollup batch. This includes all of the tranisitions in readable form + * Rollup Queue Batch. This includes all of the transactions in readable form * as well as the merkle tree which it generates. 
*/ export class RollupQueueBatch { - public elements: string[] //Rollup batch + public elements: string[] public elementsMerkleTree: SparseMerkleTreeImpl public timestamp: number @@ -148,7 +148,6 @@ export class RollupQueueBatch { * Generate the elements merkle tree from this.elements */ public async generateTree(): Promise { - // Create a tree! const treeHeight = Math.ceil(Math.log2(this.elements.length)) + 1 // The height should actually not be plus 1 this.elementsMerkleTree = await SparseMerkleTreeImpl.create( newInMemoryDB(), diff --git a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts index 4f8ef157073af..1a6b772b53c38 100644 --- a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts @@ -14,24 +14,21 @@ const log = getLogger('rollup-queue', true) import * as RollupQueue from '../../build/RollupQueue.json' import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' -/* Begin tests */ describe('RollupQueue', () => { const provider = createMockProvider() - const [wallet1, wallet2] = getWallets(provider) + const [wallet] = getWallets(provider) let rollupQueue let rollupMerkleUtils + const defaultTx = '0x1234' - /* Link libraries before tests */ before(async () => { - rollupMerkleUtils = await deployContract(wallet1, RollupMerkleUtils, [], { + rollupMerkleUtils = await deployContract(wallet, RollupMerkleUtils, [], { gasLimit: 6700000, }) }) - - /* Deploy a new RollupChain before each test */ beforeEach(async () => { rollupQueue = await deployContract( - wallet1, + wallet, RollupQueue, [rollupMerkleUtils.address], { @@ -52,23 +49,18 @@ describe('RollupQueue', () => { await localBatch.generateTree() return localBatch } - /* - * Test enqueueTx() - */ + describe('enqueueTx() ', async () => { it('should not throw as long as it gets a bytes array (even if its invalid)', async () => { - const tx = '0x1234' - await rollupQueue.enqueueTx(tx) // Did not throw... success! 
+ await rollupQueue.enqueueTx(defaultTx) }) it('should add to batches array', async () => { - const tx = '0x1234' - const output = await rollupQueue.enqueueTx(tx) + await rollupQueue.enqueueTx(defaultTx) const batchesLength = await rollupQueue.getBatchesLength() batchesLength.toNumber().should.equal(1) }) - it('should calculate set the TimestampedHash correctly', async () => { - const tx = '0x1234' - const localBatch = await enqueueAndGenerateBatch(tx) + it('should set the TimestampedHash correctly', async () => { + const localBatch = await enqueueAndGenerateBatch(defaultTx) const { txHash, timestamp } = await rollupQueue.batches(0) const expectedBatchHeaderHash = await localBatch.getMerkleRoot() txHash.should.equal(expectedBatchHeaderHash) @@ -76,13 +68,12 @@ describe('RollupQueue', () => { }) it('should add multiple batches correctly', async () => { - const tx = '0x1234' - const numBatches = 10 - for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { - const localBatch = await enqueueAndGenerateBatch(tx) - const { txHash, timestamp } = await rollupQueue.batches(batchIndex) - const expectedBatchHeaderHash = await localBatch.getMerkleRoot() - txHash.should.equal(expectedBatchHeaderHash) + const numBatches = 5 + for (let i= 0; i < numBatches; i++) { + const localBatch = await enqueueAndGenerateBatch(defaultTx) + const { txHash, timestamp } = await rollupQueue.batches(i) + const expectedTxHash = await localBatch.getMerkleRoot() + txHash.should.equal(expectedTxHash) timestamp.should.equal(localBatch.timestamp) } //check batches length @@ -93,9 +84,7 @@ describe('RollupQueue', () => { describe('dequeueBatch()', async () => { it('should dequeue single batch', async () => { - const tx = '0x1234' - const localBatch = await enqueueAndGenerateBatch(tx) - // delete the single appended batch + const localBatch = await enqueueAndGenerateBatch(defaultTx) await rollupQueue.dequeueBatch() const batchesLength = await rollupQueue.getBatchesLength() @@ -112,20 +101,29 @@ describe('RollupQueue', () => { }) it('should dequeue many batches', async () => { - const tx = '0x1234' const numBatches = 5 + const localBatches = [] for (let i = 0; i < numBatches; i++) { - await enqueueAndGenerateBatch(tx) + const localBatch = await enqueueAndGenerateBatch(defaultTx) + localBatches.push(localBatch) } for (let i = 0; i < numBatches; i++) { + const frontBatch = await rollupQueue.peek() + const localFrontBatch = localBatches[i] + const expectedTxHash = await localFrontBatch.getMerkleRoot() + frontBatch.txHash.should.equal(expectedTxHash) + frontBatch.timestamp.should.equal(localFrontBatch.timestamp) + await rollupQueue.dequeueBatch() + const front = await rollupQueue.front() front.should.equal(i + 1) - const { txHash, timestamp } = await rollupQueue.batches(i) - txHash.should.equal( + + const dequeuedBatch = await rollupQueue.batches(i) + dequeuedBatch.txHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' ) - timestamp.should.equal(0) + dequeuedBatch.timestamp.should.equal(0) } const batchesLength = await rollupQueue.getBatchesLength() batchesLength.should.equal(numBatches) @@ -133,7 +131,7 @@ describe('RollupQueue', () => { isEmpty.should.equal(true) }) - it('should throw if dequeueing from empty queue', async () => { + it('should revert if dequeueing from empty queue', async () => { await rollupQueue .dequeueBatch() .should.be.revertedWith( @@ -141,13 +139,10 @@ describe('RollupQueue', () => { ) }) - it('should throw if dequeueing from a once populated, now empty queue', 
async () => { - const tx = '0x1234' + it('should revert if dequeueing from a once populated, now empty queue', async () => { const numBatches = 3 for (let i = 0; i < numBatches; i++) { - await enqueueAndGenerateBatch(tx) - } - for (let i = 0; i < numBatches; i++) { + await enqueueAndGenerateBatch(defaultTx) await rollupQueue.dequeueBatch() } await rollupQueue @@ -159,16 +154,17 @@ describe('RollupQueue', () => { }) describe('peek() and peekTimestamp()', async () => { it('should peek successfully with single element', async () => { - const tx = '0x1234' - const localBatch = await enqueueAndGenerateBatch(tx) + const localBatch = await enqueueAndGenerateBatch(defaultTx) const { txHash, timestamp } = await rollupQueue.peek() - const peekTimestamp = await rollupQueue.peekTimestamp() const expectedBatchHeaderHash = await localBatch.getMerkleRoot() txHash.should.equal(expectedBatchHeaderHash) - peekTimestamp.should.equal(timestamp) timestamp.should.equal(localBatch.timestamp) + + const peekTimestamp = await rollupQueue.peekTimestamp() + peekTimestamp.should.equal(timestamp) }) - it('should revert when peeking at empty queue', async () => { + + it('should revert when peeking at an empty queue', async () => { await rollupQueue .peek() .should.be.revertedWith( From 81f55573dd043f0d3305d3372288aa72837e7702 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Thu, 21 May 2020 21:52:09 -0400 Subject: [PATCH 21/37] clean up canonicaltxChain tests --- .../CanonicalTransactionChain.spec.ts | 50 +++++++------------ .../L1ToL2TransactionQueue.spec.ts | 16 ++---- .../test/rollup-list/RollupQueue.spec.ts | 2 +- 3 files changed, 25 insertions(+), 43 deletions(-) diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index 2c9d45f972e2f..35e37c3b796a5 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -30,6 +30,7 @@ describe('CanonicalTransactionChain', () => { let l1ToL2Queue const localL1ToL2Queue = [] const LIVENESS_ASSUMPTION = 600 //600 seconds = 10 minutes + const defaultBatch = ['0x1234', '0x5678'] const appendBatch = async (batch: string[]): Promise => { const timestamp = Math.floor(Date.now() / 1000) @@ -99,8 +100,7 @@ describe('CanonicalTransactionChain', () => { describe('appendTransactionBatch()', async () => { it('should not throw when appending a batch from the sequencer', async () => { - const batch = ['0x1234', '0x1234'] - await appendBatch(batch) + await appendBatch(defaultBatch) }) it('should throw if submitting an empty batch', async () => { const emptyBatch = [] @@ -109,79 +109,69 @@ describe('CanonicalTransactionChain', () => { ) }) it('should rever if submitting a 10 minute old batch', async () => { - const batch = ['0x1234', '0x1234'] const timestamp = Math.floor(Date.now() / 1000) const oldTimestamp = timestamp - (LIVENESS_ASSUMPTION + 1) - // Submit the rollup batch on-chain await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(batch, oldTimestamp) + .appendTransactionBatch(defaultBatch, oldTimestamp) .should.be.revertedWith( 'VM Exception while processing transaction: revert Cannot submit a batch with a timestamp older than the sequencer liveness assumption' ) }) it('should not revert if submitting a 5 minute old batch', async () => { - const batch = ['0x1234', '0x1234'] const timestamp = Math.floor(Date.now() / 1000) const oldTimestamp = 
timestamp - LIVENESS_ASSUMPTION / 2 - // Submit the rollup batch on-chain await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(batch, oldTimestamp) + .appendTransactionBatch(defaultBatch, oldTimestamp) }) it('should revert if submitting a batch with a future timestamp', async () => { - const batch = ['0x1234', '0x1234'] const timestamp = Math.floor(Date.now() / 1000) const futureTimestamp = timestamp + 100 // Submit the rollup batch on-chain await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(batch, futureTimestamp) + .appendTransactionBatch(defaultBatch, futureTimestamp) .should.be.revertedWith( 'VM Exception while processing transaction: revert Cannot submit a batch with a timestamp in the future' ) }) it('should revert if submitting a new batch with a timestamp less than latest batch timestamp', async () => { - const batch = ['0x1234', '0x1234'] - const timestamp = await appendBatch(batch) + const timestamp = await appendBatch(defaultBatch) const oldTimestamp = timestamp - 1 // Submit the rollup batch on-chain await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(batch, oldTimestamp) + .appendTransactionBatch(defaultBatch, oldTimestamp) .should.be.revertedWith( 'VM Exception while processing transaction: revert Timestamps must monotonically increase' ) }) it('should add to batches array', async () => { - const batch = ['0x1234', '0x6578'] - await appendBatch(batch) + await appendBatch(defaultBatch) const batchesLength = await canonicalTxChain.getBatchesLength() batchesLength.toNumber().should.equal(1) }) it('should update cumulativeNumElements correctly', async () => { - const batch = ['0x1234', '0x5678'] - await appendBatch(batch) + await appendBatch(defaultBatch) const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() cumulativeNumElements.toNumber().should.equal(2) }) it('should not allow appendTransactionBatch from non-sequencer', async () => { - const batch = ['0x1234', '0x6578'] const timestamp = Math.floor(Date.now() / 1000) // Submit the rollup batch on-chain await canonicalTxChain - .appendTransactionBatch(batch, timestamp) + .appendTransactionBatch(defaultBatch, timestamp) .should.be.revertedWith( 'VM Exception while processing transaction: revert Message sender does not have permission to append a batch' ) }) it('should calculate batchHeaderHash correctly', async () => { - const batch = ['0x1234', '0x5678'] const batchIndex = 0 const cumulativePrevElements = 0 const localBatch = await appendAndGenerateBatch( - batch, + defaultBatch, batchIndex, cumulativePrevElements ) @@ -191,12 +181,11 @@ describe('CanonicalTransactionChain', () => { calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) }) it('should add multiple batches correctly', async () => { - const batch = ['0x1234', '0x5678'] const numBatchs = 10 for (let batchIndex = 0; batchIndex < numBatchs; batchIndex++) { - const cumulativePrevElements = batch.length * batchIndex + const cumulativePrevElements = defaultBatch.length * batchIndex const localBatch = await appendAndGenerateBatch( - batch, + defaultBatch, batchIndex, cumulativePrevElements ) @@ -209,7 +198,9 @@ describe('CanonicalTransactionChain', () => { } //check cumulativeNumElements const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() - cumulativeNumElements.toNumber().should.equal(numBatchs * batch.length) + cumulativeNumElements + .toNumber() + .should.equal(numBatchs * defaultBatch.length) //check batches length const batchesLength = await 
canonicalTxChain.getBatchesLength() batchesLength.toNumber().should.equal(numBatchs) @@ -227,24 +218,21 @@ describe('CanonicalTransactionChain', () => { localBatch = await enqueueAndGenerateBatch(tx) }) it('should succesfully append a batch with an older timestamp', async () => { - const batch = ['0x1234', '0x6578'] const oldTimestamp = localBatch.timestamp - 1 await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(batch, oldTimestamp) + .appendTransactionBatch(defaultBatch, oldTimestamp) }) it('should succesfully append a batch with an equal timestamp', async () => { - const batch = ['0x1234', '0x6578'] await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(batch, localBatch.timestamp) + .appendTransactionBatch(defaultBatch, localBatch.timestamp) }) it('should revert when appending a block with a newer timestamp', async () => { - const batch = ['0x1234', '0x6578'] const newTimestamp = localBatch.timestamp + 1 await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(batch, newTimestamp) + .appendTransactionBatch(defaultBatch, newTimestamp) .should.be.revertedWith( 'VM Exception while processing transaction: revert Cannot submit a batch with a timestamp in the future' ) diff --git a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts index 8c3e75f0d9a45..78661ad2facec 100644 --- a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts @@ -4,9 +4,6 @@ import '../setup' import { getLogger } from '@eth-optimism/core-utils' import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' -/* Internal Imports */ -import { DefaultRollupBatch } from './RLhelper' - /* Logging */ const log = getLogger('l1-to-l2-tx-queue', true) @@ -22,6 +19,7 @@ describe('L1ToL2TransactionQueue', () => { l1ToL2TransactionPasser, canonicalTransactionChain, ] = getWallets(provider) + const defaultTx = '0x1234' let l1ToL2TxQueue let rollupMerkleUtils @@ -50,15 +48,13 @@ describe('L1ToL2TransactionQueue', () => { describe('enqueueBatch() ', async () => { it('should allow enqueue from l1ToL2TransactionPasser', async () => { - const tx = '0x1234' - await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueTx(tx) // Did not throw... success! + await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueTx(defaultTx) // Did not throw... success! 
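// A minimal, illustrative sketch (names and values below are placeholders, not
// part of these contracts) of how the batchHeaderHash comparisons in these
// CanonicalTransactionChain tests line up with the on-chain
// keccak256(abi.encodePacked(timestamp, isL1ToL2Tx, elementsMerkleRoot,
// numElementsInBatch, cumulativePrevElements)). It assumes ethers'
// utils.solidityKeccak256, which tightly packs values the same way as
// abi.encodePacked; the RLhelper diffs already import `utils` from 'ethers'.
import { utils } from 'ethers'

const timestamp = 1590000000 // hypothetical OVM timestamp
const isL1ToL2Tx = false // a sequencer batch in this example
const elementsMerkleRoot = '0x' + '11'.repeat(32) // placeholder merkle root
const numElementsInBatch = 2
const cumulativePrevElements = 0

// Same field order and types as the Solidity abi.encodePacked call.
const batchHeaderHash = utils.solidityKeccak256(
  ['uint', 'bool', 'bytes32', 'uint', 'uint'],
  [
    timestamp,
    isL1ToL2Tx,
    elementsMerkleRoot,
    numElementsInBatch,
    cumulativePrevElements,
  ]
)

// For a batch appended on-chain with exactly these values, this would equal
// canonicalTxChain.batches(batchIndex).
console.log(batchHeaderHash)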
const batchesLength = await l1ToL2TxQueue.getBatchesLength() batchesLength.should.equal(1) }) it('should not allow enqueue from other address', async () => { - const tx = '0x1234' await l1ToL2TxQueue - .enqueueTx(tx) + .enqueueTx(defaultTx) .should.be.revertedWith( 'VM Exception while processing transaction: revert Message sender does not have permission to enqueue' ) @@ -67,8 +63,7 @@ describe('L1ToL2TransactionQueue', () => { describe('dequeueBatch() ', async () => { it('should allow dequeue from canonicalTransactionChain', async () => { - const tx = '0x1234' - await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueTx(tx) + await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueTx(defaultTx) await l1ToL2TxQueue.connect(canonicalTransactionChain).dequeueBatch() const batchesLength = await l1ToL2TxQueue.getBatchesLength() batchesLength.should.equal(1) @@ -81,8 +76,7 @@ describe('L1ToL2TransactionQueue', () => { front.should.equal(1) }) it('should not allow dequeue from other address', async () => { - const tx = '0x1234' - await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueTx(tx) + await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueTx(defaultTx) await l1ToL2TxQueue .dequeueBatch() .should.be.revertedWith( diff --git a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts index 1a6b772b53c38..3d1697fec40c3 100644 --- a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts @@ -69,7 +69,7 @@ describe('RollupQueue', () => { it('should add multiple batches correctly', async () => { const numBatches = 5 - for (let i= 0; i < numBatches; i++) { + for (let i = 0; i < numBatches; i++) { const localBatch = await enqueueAndGenerateBatch(defaultTx) const { txHash, timestamp } = await rollupQueue.batches(i) const expectedTxHash = await localBatch.getMerkleRoot() From b2c3014f79a2469886b3627834fa6675239079c2 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Fri, 22 May 2020 00:53:17 -0400 Subject: [PATCH 22/37] clean up neaming, add verifyElement test --- .../contracts/CanonicalTransactionChain.sol | 2 +- .../contracts/RollupQueue.sol | 20 +-- .../CanonicalTransactionChain.spec.ts | 154 ++++++++---------- .../L1ToL2TransactionQueue.spec.ts | 12 +- .../test/rollup-list/RLhelper.ts | 12 +- .../test/rollup-list/RollupQueue.spec.ts | 49 +++--- 6 files changed, 111 insertions(+), 138 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index 0d4404706ee94..8fd3058302c9d 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -66,7 +66,7 @@ contract CanonicalTransactionChain { )); batches.push(batchHeaderHash); cumulativeNumElements += numElementsInBatch; - l1ToL2Queue.dequeueBatch(); + l1ToL2Queue.dequeue(); } function appendTransactionBatch(bytes[] memory _txBatch, uint _timestamp) public { diff --git a/packages/rollup-contracts/contracts/RollupQueue.sol b/packages/rollup-contracts/contracts/RollupQueue.sol index 1460385881dd8..efea20a442aa5 100644 --- a/packages/rollup-contracts/contracts/RollupQueue.sol +++ b/packages/rollup-contracts/contracts/RollupQueue.sol @@ -7,7 +7,7 @@ import {RollupMerkleUtils} from "./RollupMerkleUtils.sol"; contract RollupQueue { // List of batch header hashes - dt.TimestampedHash[] public batches; + 
dt.TimestampedHash[] public batchHeaders; uint256 public front; //Index of the first batchHeaderHash in the list // The Rollup Merkle Tree library (currently a contract for ease of testing) @@ -21,17 +21,17 @@ contract RollupQueue { front = 0; } - function getBatchesLength() public view returns (uint) { - return batches.length; + function getBatchHeadersLength() public view returns (uint) { + return batchHeaders.length; } function isEmpty() public view returns (bool) { - return front >= batches.length; + return front >= batchHeaders.length; } function peek() public view returns (dt.TimestampedHash memory) { require(!isEmpty(), "Queue is empty, no element to peek at"); - return batches[front]; + return batchHeaders[front]; } function peekTimestamp() public view returns (uint) { @@ -48,15 +48,13 @@ contract RollupQueue { now, keccak256(_tx) ); - batches.push(timestampedHash); + batchHeaders.push(timestampedHash); } - // dequeues the first (oldest) batch - // Note: keep in mind that front can point to a non-existent batch if the list is empty. - function dequeueBatch() public { + function dequeue() public { require(authenticateDequeue(msg.sender), "Message sender does not have permission to dequeue"); - require(front < batches.length, "Cannot dequeue from an empty queue"); - delete batches[front]; + require(front < batchHeaders.length, "Cannot dequeue from an empty queue"); + delete batchHeaders[front]; front++; } } diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index 35e37c3b796a5..3ac82fa704ca0 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -9,7 +9,7 @@ import { Contract } from 'ethers' import { DefaultRollupBatch, RollupQueueBatch } from './RLhelper' /* Logging */ -const log = getLogger('rollup-tx-queue', true) +const log = getLogger('canonical-tx-chain', true) /* Contract Imports */ import * as CanonicalTransactionChain from '../../build/CanonicalTransactionChain.json' @@ -30,7 +30,8 @@ describe('CanonicalTransactionChain', () => { let l1ToL2Queue const localL1ToL2Queue = [] const LIVENESS_ASSUMPTION = 600 //600 seconds = 10 minutes - const defaultBatch = ['0x1234', '0x5678'] + const DEFAULT_BATCH = ['0x1234', '0x5678'] + const DEFAULT_TX = '0x1234' const appendBatch = async (batch: string[]): Promise => { const timestamp = Math.floor(Date.now() / 1000) @@ -43,8 +44,8 @@ describe('CanonicalTransactionChain', () => { const appendAndGenerateBatch = async ( batch: string[], - batchIndex: number, - cumulativePrevElements: number + batchIndex: number = 0, + cumulativePrevElements: number = 0 ): Promise => { const timestamp = await appendBatch(batch) // Generate a local version of the rollup batch @@ -96,11 +97,17 @@ describe('CanonicalTransactionChain', () => { gasLimit: 6700000, } ) + const l1ToL2QueueAddress = await canonicalTxChain.l1ToL2Queue() + l1ToL2Queue = new Contract( + l1ToL2QueueAddress, + L1ToL2TransactionQueue.abi, + provider + ) }) describe('appendTransactionBatch()', async () => { it('should not throw when appending a batch from the sequencer', async () => { - await appendBatch(defaultBatch) + await appendBatch(DEFAULT_BATCH) }) it('should throw if submitting an empty batch', async () => { const emptyBatch = [] @@ -113,7 +120,7 @@ describe('CanonicalTransactionChain', () => { const oldTimestamp = timestamp - (LIVENESS_ASSUMPTION + 
1) await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(defaultBatch, oldTimestamp) + .appendTransactionBatch(DEFAULT_BATCH, oldTimestamp) .should.be.revertedWith( 'VM Exception while processing transaction: revert Cannot submit a batch with a timestamp older than the sequencer liveness assumption' ) @@ -123,7 +130,7 @@ describe('CanonicalTransactionChain', () => { const oldTimestamp = timestamp - LIVENESS_ASSUMPTION / 2 await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(defaultBatch, oldTimestamp) + .appendTransactionBatch(DEFAULT_BATCH, oldTimestamp) }) it('should revert if submitting a batch with a future timestamp', async () => { const timestamp = Math.floor(Date.now() / 1000) @@ -131,30 +138,30 @@ describe('CanonicalTransactionChain', () => { // Submit the rollup batch on-chain await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(defaultBatch, futureTimestamp) + .appendTransactionBatch(DEFAULT_BATCH, futureTimestamp) .should.be.revertedWith( 'VM Exception while processing transaction: revert Cannot submit a batch with a timestamp in the future' ) }) it('should revert if submitting a new batch with a timestamp less than latest batch timestamp', async () => { - const timestamp = await appendBatch(defaultBatch) + const timestamp = await appendBatch(DEFAULT_BATCH) const oldTimestamp = timestamp - 1 // Submit the rollup batch on-chain await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(defaultBatch, oldTimestamp) + .appendTransactionBatch(DEFAULT_BATCH, oldTimestamp) .should.be.revertedWith( 'VM Exception while processing transaction: revert Timestamps must monotonically increase' ) }) it('should add to batches array', async () => { - await appendBatch(defaultBatch) + await appendBatch(DEFAULT_BATCH) const batchesLength = await canonicalTxChain.getBatchesLength() batchesLength.toNumber().should.equal(1) }) it('should update cumulativeNumElements correctly', async () => { - await appendBatch(defaultBatch) + await appendBatch(DEFAULT_BATCH) const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() cumulativeNumElements.toNumber().should.equal(2) }) @@ -162,20 +169,13 @@ describe('CanonicalTransactionChain', () => { const timestamp = Math.floor(Date.now() / 1000) // Submit the rollup batch on-chain await canonicalTxChain - .appendTransactionBatch(defaultBatch, timestamp) + .appendTransactionBatch(DEFAULT_BATCH, timestamp) .should.be.revertedWith( 'VM Exception while processing transaction: revert Message sender does not have permission to append a batch' ) }) it('should calculate batchHeaderHash correctly', async () => { - const batchIndex = 0 - const cumulativePrevElements = 0 - const localBatch = await appendAndGenerateBatch( - defaultBatch, - batchIndex, - cumulativePrevElements - ) - //Check batchHeaderHash + const localBatch = await appendAndGenerateBatch(DEFAULT_BATCH) const expectedBatchHeaderHash = await localBatch.hashBatchHeader() const calculatedBatchHeaderHash = await canonicalTxChain.batches(0) calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) @@ -183,56 +183,46 @@ describe('CanonicalTransactionChain', () => { it('should add multiple batches correctly', async () => { const numBatchs = 10 for (let batchIndex = 0; batchIndex < numBatchs; batchIndex++) { - const cumulativePrevElements = defaultBatch.length * batchIndex + const cumulativePrevElements = DEFAULT_BATCH.length * batchIndex const localBatch = await appendAndGenerateBatch( - defaultBatch, + DEFAULT_BATCH, batchIndex, 
cumulativePrevElements ) - //Check batchHeaderHash const expectedBatchHeaderHash = await localBatch.hashBatchHeader() const calculatedBatchHeaderHash = await canonicalTxChain.batches( batchIndex ) calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) } - //check cumulativeNumElements const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() cumulativeNumElements .toNumber() - .should.equal(numBatchs * defaultBatch.length) - //check batches length + .should.equal(numBatchs * DEFAULT_BATCH.length) const batchesLength = await canonicalTxChain.getBatchesLength() batchesLength.toNumber().should.equal(numBatchs) }) describe('when the l1ToL2Queue is not empty', async () => { let localBatch beforeEach(async () => { - const tx = '0x1234' - const l1ToL2QueueAddress = await canonicalTxChain.l1ToL2Queue() - l1ToL2Queue = new Contract( - l1ToL2QueueAddress, - L1ToL2TransactionQueue.abi, - provider - ) - localBatch = await enqueueAndGenerateBatch(tx) + localBatch = await enqueueAndGenerateBatch(DEFAULT_TX) }) it('should succesfully append a batch with an older timestamp', async () => { const oldTimestamp = localBatch.timestamp - 1 await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(defaultBatch, oldTimestamp) + .appendTransactionBatch(DEFAULT_BATCH, oldTimestamp) }) it('should succesfully append a batch with an equal timestamp', async () => { await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(defaultBatch, localBatch.timestamp) + .appendTransactionBatch(DEFAULT_BATCH, localBatch.timestamp) }) it('should revert when appending a block with a newer timestamp', async () => { const newTimestamp = localBatch.timestamp + 1 await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(defaultBatch, newTimestamp) + .appendTransactionBatch(DEFAULT_BATCH, newTimestamp) .should.be.revertedWith( 'VM Exception while processing transaction: revert Cannot submit a batch with a timestamp in the future' ) @@ -241,36 +231,29 @@ describe('CanonicalTransactionChain', () => { }) describe('appendL1ToL2Batch()', async () => { - const tx = '0x1234' describe('when there is a batch in the L1toL2Queue', async () => { beforeEach(async () => { - const l1ToL2QueueAddress = await canonicalTxChain.l1ToL2Queue() - l1ToL2Queue = new Contract( - l1ToL2QueueAddress, - L1ToL2TransactionQueue.abi, - provider - ) - const localBatch = await enqueueAndGenerateBatch(tx) + const localBatch = await enqueueAndGenerateBatch(DEFAULT_TX) localL1ToL2Queue.push(localBatch) }) it('should successfully dequeue a L1ToL2Batch', async () => { await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() const front = await l1ToL2Queue.front() front.should.equal(1) - const { timestamp, txHash } = await l1ToL2Queue.batches(0) + const { timestamp, txHash } = await l1ToL2Queue.batchHeaders(0) timestamp.should.equal(0) txHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' ) }) it('should successfully append a L1ToL2Batch', async () => { - const { timestamp, txHash } = await l1ToL2Queue.batches(0) + const { timestamp, txHash } = await l1ToL2Queue.batchHeaders(0) const localBatch = new DefaultRollupBatch( timestamp, true, // isL1ToL2Tx 0, //batchIndex 0, // cumulativePrevElements - [tx] // elements + [DEFAULT_TX] // elements ) await localBatch.generateTree() const localBatchHeaderHash = await localBatch.hashBatchHeader() @@ -309,48 +292,47 @@ describe('CanonicalTransactionChain', () => { }) describe('verifyElement() ', async () => { - it('should return 
true for valid elements for different batchIndexes', async () => { - const maxBatchNumber = 5 - const minBatchNumber = 0 - const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - for ( - let batchIndex = minBatchNumber; - batchIndex < maxBatchNumber + 1; - batchIndex++ - ) { - const timestamp = batchIndex + it('should return true for valid elements for different batches and elements', async () => { + const numBatches = 3 + const batch = [ + '0x1234', + '0x4567', + '0x890a', + '0x4567', + '0x890a', + '0xabcd', + '0x1234', + ] + for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { const cumulativePrevElements = batch.length * batchIndex const localBatch = await appendAndGenerateBatch( batch, batchIndex, cumulativePrevElements ) - // Create inclusion proof for the element at elementIndex - const elementIndex = 3 - const element = batch[elementIndex] - const position = localBatch.getPosition(elementIndex) - const elementInclusionProof = await localBatch.getElementInclusionProof( - elementIndex - ) - const isIncluded = await canonicalTxChain.verifyElement( - element, - position, - elementInclusionProof - ) - isIncluded.should.equal(true) + for ( + let elementIndex = 0; + elementIndex < batch.length; + elementIndex += 3 + ) { + const element = batch[elementIndex] + const position = localBatch.getPosition(elementIndex) + const elementInclusionProof = await localBatch.getElementInclusionProof( + elementIndex + ) + const isIncluded = await canonicalTxChain.verifyElement( + element, + position, + elementInclusionProof + ) + isIncluded.should.equal(true) + } } }) it('should return false for wrong position with wrong indexInBatch', async () => { const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - const cumulativePrevElements = 0 - const batchIndex = 0 - const timestamp = 0 - const localBatch = await appendAndGenerateBatch( - batch, - batchIndex, - cumulativePrevElements - ) + const localBatch = await appendAndGenerateBatch(batch) const elementIndex = 1 const element = batch[elementIndex] const position = localBatch.getPosition(elementIndex) @@ -368,16 +350,8 @@ describe('CanonicalTransactionChain', () => { }) it('should return false for wrong position and matching indexInBatch', async () => { - const batch = ['0x1234', '0x4567', '0x890a', '0xabcd'] - const cumulativePrevElements = 0 - const batchIndex = 0 - const timestamp = 0 - const localBatch = await appendAndGenerateBatch( - batch, - batchIndex, - cumulativePrevElements - ) - //generate inclusion proof + const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + const localBatch = await appendAndGenerateBatch(batch) const elementIndex = 1 const element = batch[elementIndex] const position = localBatch.getPosition(elementIndex) diff --git a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts index 78661ad2facec..96de0a08840b4 100644 --- a/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/L1ToL2TransactionQueue.spec.ts @@ -49,7 +49,7 @@ describe('L1ToL2TransactionQueue', () => { describe('enqueueBatch() ', async () => { it('should allow enqueue from l1ToL2TransactionPasser', async () => { await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueTx(defaultTx) // Did not throw... success! 
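// A minimal, illustrative sketch of the position bookkeeping the
// verifyElement() tests rely on: an element's chain-wide position is its index
// inside its batch plus the number of elements in all earlier batches, and the
// contract rejects a proof whose claimed position does not match that sum
// before it even checks the merkle proof. The helper names here are
// illustrative only, not part of the contracts or test helpers.
const getGlobalPosition = (
  cumulativePrevElements: number,
  indexInBatch: number
): number => cumulativePrevElements + indexInBatch

const positionMatchesProof = (
  claimedPosition: number,
  cumulativePrevElements: number,
  indexInBatch: number
): boolean =>
  claimedPosition === getGlobalPosition(cumulativePrevElements, indexInBatch)

// e.g. the element at index 3 of the second batch, when each batch holds 6:
console.log(getGlobalPosition(6, 3)) // 9
console.log(positionMatchesProof(8, 6, 3)) // false -> verifyElement returns false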
- const batchesLength = await l1ToL2TxQueue.getBatchesLength() + const batchesLength = await l1ToL2TxQueue.getBatchHeadersLength() batchesLength.should.equal(1) }) it('should not allow enqueue from other address', async () => { @@ -61,13 +61,13 @@ describe('L1ToL2TransactionQueue', () => { }) }) - describe('dequeueBatch() ', async () => { + describe('dequeue() ', async () => { it('should allow dequeue from canonicalTransactionChain', async () => { await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueTx(defaultTx) - await l1ToL2TxQueue.connect(canonicalTransactionChain).dequeueBatch() - const batchesLength = await l1ToL2TxQueue.getBatchesLength() + await l1ToL2TxQueue.connect(canonicalTransactionChain).dequeue() + const batchesLength = await l1ToL2TxQueue.getBatchHeadersLength() batchesLength.should.equal(1) - const { txHash, timestamp } = await l1ToL2TxQueue.batches(0) + const { txHash, timestamp } = await l1ToL2TxQueue.batchHeaders(0) txHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' ) @@ -78,7 +78,7 @@ describe('L1ToL2TransactionQueue', () => { it('should not allow dequeue from other address', async () => { await l1ToL2TxQueue.connect(l1ToL2TransactionPasser).enqueueTx(defaultTx) await l1ToL2TxQueue - .dequeueBatch() + .dequeue() .should.be.revertedWith( 'VM Exception while processing transaction: revert Message sender does not have permission to dequeue' ) diff --git a/packages/rollup-contracts/test/rollup-list/RLhelper.ts b/packages/rollup-contracts/test/rollup-list/RLhelper.ts index 68a81476ffc56..6cf2ff42238e5 100644 --- a/packages/rollup-contracts/test/rollup-list/RLhelper.ts +++ b/packages/rollup-contracts/test/rollup-list/RLhelper.ts @@ -12,7 +12,7 @@ import { utils } from 'ethers' interface TxChainBatchHeader { timestamp: number - isL1ToL2Tx: boolean + isL1ToL2: boolean elementsMerkleRoot: string numElementsInBatch: number cumulativePrevElements: number @@ -32,7 +32,7 @@ interface ElementInclusionProof { */ export class DefaultRollupBatch { public timestamp: number - public isL1ToL2Tx: boolean + public isL1ToL2: boolean public batchIndex: number //index in public cumulativePrevElements: number //in batchHeader public elements: string[] //Rollup batch @@ -40,12 +40,12 @@ export class DefaultRollupBatch { constructor( timestamp: number, // Ethereum batch this batch was submitted in - isL1ToL2Tx: boolean, + isL1ToL2: boolean, batchIndex: number, // index in batchs array (first batch has batchIndex of 0) cumulativePrevElements: number, elements: string[] ) { - this.isL1ToL2Tx = isL1ToL2Tx + this.isL1ToL2 = isL1ToL2 this.timestamp = timestamp this.batchIndex = batchIndex this.cumulativePrevElements = cumulativePrevElements @@ -99,7 +99,7 @@ export class DefaultRollupBatch { ['uint', 'bool', 'bytes32', 'uint', 'uint'], [ this.timestamp, - this.isL1ToL2Tx, + this.isL1ToL2, bufToHexString(bufferRoot), this.elements.length, this.cumulativePrevElements, @@ -120,7 +120,7 @@ export class DefaultRollupBatch { batchIndex: this.batchIndex, batchHeader: { timestamp: this.timestamp, - isL1ToL2Tx: this.isL1ToL2Tx, + isL1ToL2: this.isL1ToL2, elementsMerkleRoot: bufToHexString(bufferRoot), numElementsInBatch: this.elements.length, cumulativePrevElements: this.cumulativePrevElements, diff --git a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts index 3d1697fec40c3..dd74eac461175 100644 --- a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts +++ 
b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts @@ -14,12 +14,13 @@ const log = getLogger('rollup-queue', true) import * as RollupQueue from '../../build/RollupQueue.json' import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' +const DEFAULT_TX = '0x1234' + describe('RollupQueue', () => { const provider = createMockProvider() const [wallet] = getWallets(provider) let rollupQueue let rollupMerkleUtils - const defaultTx = '0x1234' before(async () => { rollupMerkleUtils = await deployContract(wallet, RollupMerkleUtils, [], { @@ -52,16 +53,16 @@ describe('RollupQueue', () => { describe('enqueueTx() ', async () => { it('should not throw as long as it gets a bytes array (even if its invalid)', async () => { - await rollupQueue.enqueueTx(defaultTx) + await rollupQueue.enqueueTx(DEFAULT_TX) }) - it('should add to batches array', async () => { - await rollupQueue.enqueueTx(defaultTx) - const batchesLength = await rollupQueue.getBatchesLength() + it('should add to batchHeaders array', async () => { + await rollupQueue.enqueueTx(DEFAULT_TX) + const batchesLength = await rollupQueue.getBatchHeadersLength() batchesLength.toNumber().should.equal(1) }) it('should set the TimestampedHash correctly', async () => { - const localBatch = await enqueueAndGenerateBatch(defaultTx) - const { txHash, timestamp } = await rollupQueue.batches(0) + const localBatch = await enqueueAndGenerateBatch(DEFAULT_TX) + const { txHash, timestamp } = await rollupQueue.batchHeaders(0) const expectedBatchHeaderHash = await localBatch.getMerkleRoot() txHash.should.equal(expectedBatchHeaderHash) timestamp.should.equal(localBatch.timestamp) @@ -70,26 +71,26 @@ describe('RollupQueue', () => { it('should add multiple batches correctly', async () => { const numBatches = 5 for (let i = 0; i < numBatches; i++) { - const localBatch = await enqueueAndGenerateBatch(defaultTx) - const { txHash, timestamp } = await rollupQueue.batches(i) + const localBatch = await enqueueAndGenerateBatch(DEFAULT_TX) + const { txHash, timestamp } = await rollupQueue.batchHeaders(i) const expectedTxHash = await localBatch.getMerkleRoot() txHash.should.equal(expectedTxHash) timestamp.should.equal(localBatch.timestamp) } //check batches length - const batchesLength = await rollupQueue.getBatchesLength() + const batchesLength = await rollupQueue.getBatchHeadersLength() batchesLength.toNumber().should.equal(numBatches) }) }) - describe('dequeueBatch()', async () => { + describe('dequeue()', async () => { it('should dequeue single batch', async () => { - const localBatch = await enqueueAndGenerateBatch(defaultTx) - await rollupQueue.dequeueBatch() + const localBatch = await enqueueAndGenerateBatch(DEFAULT_TX) + await rollupQueue.dequeue() - const batchesLength = await rollupQueue.getBatchesLength() + const batchesLength = await rollupQueue.getBatchHeadersLength() batchesLength.should.equal(1) - const { txHash, timestamp } = await rollupQueue.batches(0) + const { txHash, timestamp } = await rollupQueue.batchHeaders(0) txHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' ) @@ -104,7 +105,7 @@ describe('RollupQueue', () => { const numBatches = 5 const localBatches = [] for (let i = 0; i < numBatches; i++) { - const localBatch = await enqueueAndGenerateBatch(defaultTx) + const localBatch = await enqueueAndGenerateBatch(DEFAULT_TX) localBatches.push(localBatch) } for (let i = 0; i < numBatches; i++) { @@ -114,18 +115,18 @@ describe('RollupQueue', () => { frontBatch.txHash.should.equal(expectedTxHash) 
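// A small model, for illustration only (not part of the patch), of why the
// dequeue tests in this spec expect batchHeaders.length to stay constant, the
// dequeued slot to read back as the zero hash, and front to advance: the
// Solidity queue never pops, it only `delete`s the slot (resetting it to
// default values) and increments the front pointer.
interface TimestampedHashModel {
  timestamp: number
  txHash: string
}

const ZERO_HASH = '0x' + '00'.repeat(32)

class QueueModel {
  public batchHeaders: TimestampedHashModel[] = []
  public front: number = 0

  public enqueue(txHash: string, timestamp: number): void {
    this.batchHeaders.push({ txHash, timestamp })
  }

  public dequeue(): void {
    if (this.isEmpty()) {
      throw new Error('Cannot dequeue from an empty queue')
    }
    // mirrors Solidity `delete batchHeaders[front]`: zero the slot, keep length
    this.batchHeaders[this.front] = { txHash: ZERO_HASH, timestamp: 0 }
    this.front++
  }

  public isEmpty(): boolean {
    return this.front >= this.batchHeaders.length
  }
}

const q = new QueueModel()
q.enqueue('0x1234', 1000)
q.dequeue()
console.log(q.batchHeaders.length, q.front, q.isEmpty()) // 1 1 true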
frontBatch.timestamp.should.equal(localFrontBatch.timestamp) - await rollupQueue.dequeueBatch() + await rollupQueue.dequeue() const front = await rollupQueue.front() front.should.equal(i + 1) - const dequeuedBatch = await rollupQueue.batches(i) + const dequeuedBatch = await rollupQueue.batchHeaders(i) dequeuedBatch.txHash.should.equal( '0x0000000000000000000000000000000000000000000000000000000000000000' ) dequeuedBatch.timestamp.should.equal(0) } - const batchesLength = await rollupQueue.getBatchesLength() + const batchesLength = await rollupQueue.getBatchHeadersLength() batchesLength.should.equal(numBatches) const isEmpty = await rollupQueue.isEmpty() isEmpty.should.equal(true) @@ -133,7 +134,7 @@ describe('RollupQueue', () => { it('should revert if dequeueing from empty queue', async () => { await rollupQueue - .dequeueBatch() + .dequeue() .should.be.revertedWith( 'VM Exception while processing transaction: revert Cannot dequeue from an empty queue' ) @@ -142,11 +143,11 @@ describe('RollupQueue', () => { it('should revert if dequeueing from a once populated, now empty queue', async () => { const numBatches = 3 for (let i = 0; i < numBatches; i++) { - await enqueueAndGenerateBatch(defaultTx) - await rollupQueue.dequeueBatch() + await enqueueAndGenerateBatch(DEFAULT_TX) + await rollupQueue.dequeue() } await rollupQueue - .dequeueBatch() + .dequeue() .should.be.revertedWith( 'VM Exception while processing transaction: revert Cannot dequeue from an empty queue' ) @@ -154,7 +155,7 @@ describe('RollupQueue', () => { }) describe('peek() and peekTimestamp()', async () => { it('should peek successfully with single element', async () => { - const localBatch = await enqueueAndGenerateBatch(defaultTx) + const localBatch = await enqueueAndGenerateBatch(DEFAULT_TX) const { txHash, timestamp } = await rollupQueue.peek() const expectedBatchHeaderHash = await localBatch.getMerkleRoot() txHash.should.equal(expectedBatchHeaderHash) From 823bf62b3a7dd93d65fed8ac3e4b015ed7d12c15 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Fri, 22 May 2020 15:07:15 -0400 Subject: [PATCH 23/37] add verifyElement test for L1toL2Batch --- .../CanonicalTransactionChain.spec.ts | 41 +++++++++++++++++++ .../test/rollup-list/RLhelper.ts | 12 +++--- 2 files changed, 47 insertions(+), 6 deletions(-) diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index 3ac82fa704ca0..bf9f272482357 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -109,12 +109,14 @@ describe('CanonicalTransactionChain', () => { it('should not throw when appending a batch from the sequencer', async () => { await appendBatch(DEFAULT_BATCH) }) + it('should throw if submitting an empty batch', async () => { const emptyBatch = [] await appendBatch(emptyBatch).should.be.revertedWith( 'VM Exception while processing transaction: revert Cannot submit an empty batch' ) }) + it('should rever if submitting a 10 minute old batch', async () => { const timestamp = Math.floor(Date.now() / 1000) const oldTimestamp = timestamp - (LIVENESS_ASSUMPTION + 1) @@ -125,6 +127,7 @@ describe('CanonicalTransactionChain', () => { 'VM Exception while processing transaction: revert Cannot submit a batch with a timestamp older than the sequencer liveness assumption' ) }) + it('should not revert if submitting a 5 minute old batch', async () => { const 
timestamp = Math.floor(Date.now() / 1000) const oldTimestamp = timestamp - LIVENESS_ASSUMPTION / 2 @@ -132,6 +135,7 @@ describe('CanonicalTransactionChain', () => { .connect(sequencer) .appendTransactionBatch(DEFAULT_BATCH, oldTimestamp) }) + it('should revert if submitting a batch with a future timestamp', async () => { const timestamp = Math.floor(Date.now() / 1000) const futureTimestamp = timestamp + 100 @@ -143,6 +147,7 @@ describe('CanonicalTransactionChain', () => { 'VM Exception while processing transaction: revert Cannot submit a batch with a timestamp in the future' ) }) + it('should revert if submitting a new batch with a timestamp less than latest batch timestamp', async () => { const timestamp = await appendBatch(DEFAULT_BATCH) const oldTimestamp = timestamp - 1 @@ -154,6 +159,7 @@ describe('CanonicalTransactionChain', () => { 'VM Exception while processing transaction: revert Timestamps must monotonically increase' ) }) + it('should add to batches array', async () => { await appendBatch(DEFAULT_BATCH) const batchesLength = await canonicalTxChain.getBatchesLength() @@ -165,6 +171,7 @@ describe('CanonicalTransactionChain', () => { const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() cumulativeNumElements.toNumber().should.equal(2) }) + it('should not allow appendTransactionBatch from non-sequencer', async () => { const timestamp = Math.floor(Date.now() / 1000) // Submit the rollup batch on-chain @@ -174,12 +181,14 @@ describe('CanonicalTransactionChain', () => { 'VM Exception while processing transaction: revert Message sender does not have permission to append a batch' ) }) + it('should calculate batchHeaderHash correctly', async () => { const localBatch = await appendAndGenerateBatch(DEFAULT_BATCH) const expectedBatchHeaderHash = await localBatch.hashBatchHeader() const calculatedBatchHeaderHash = await canonicalTxChain.batches(0) calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) }) + it('should add multiple batches correctly', async () => { const numBatchs = 10 for (let batchIndex = 0; batchIndex < numBatchs; batchIndex++) { @@ -207,17 +216,20 @@ describe('CanonicalTransactionChain', () => { beforeEach(async () => { localBatch = await enqueueAndGenerateBatch(DEFAULT_TX) }) + it('should succesfully append a batch with an older timestamp', async () => { const oldTimestamp = localBatch.timestamp - 1 await canonicalTxChain .connect(sequencer) .appendTransactionBatch(DEFAULT_BATCH, oldTimestamp) }) + it('should succesfully append a batch with an equal timestamp', async () => { await canonicalTxChain .connect(sequencer) .appendTransactionBatch(DEFAULT_BATCH, localBatch.timestamp) }) + it('should revert when appending a block with a newer timestamp', async () => { const newTimestamp = localBatch.timestamp + 1 await canonicalTxChain @@ -236,6 +248,7 @@ describe('CanonicalTransactionChain', () => { const localBatch = await enqueueAndGenerateBatch(DEFAULT_TX) localL1ToL2Queue.push(localBatch) }) + it('should successfully dequeue a L1ToL2Batch', async () => { await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() const front = await l1ToL2Queue.front() @@ -246,6 +259,7 @@ describe('CanonicalTransactionChain', () => { '0x0000000000000000000000000000000000000000000000000000000000000000' ) }) + it('should successfully append a L1ToL2Batch', async () => { const { timestamp, txHash } = await l1ToL2Queue.batchHeaders(0) const localBatch = new DefaultRollupBatch( @@ -261,6 +275,7 @@ describe('CanonicalTransactionChain', () => { const 
batchHeaderHash = await canonicalTxChain.batches(0) batchHeaderHash.should.equal(localBatchHeaderHash) }) + it('should not allow non-sequencer to appendL1ToL2Batch if less than 10 minutes old', async () => { await canonicalTxChain .appendL1ToL2Batch() @@ -268,6 +283,7 @@ describe('CanonicalTransactionChain', () => { 'VM Exception while processing transaction: revert Message sender does not have permission to append this batch' ) }) + describe('after 10 minutes have elapsed', async () => { let snapshotID beforeEach(async () => { @@ -282,6 +298,7 @@ describe('CanonicalTransactionChain', () => { }) }) }) + it('should revert when L1ToL2TxQueue is empty', async () => { await canonicalTxChain .appendL1ToL2Batch() @@ -330,6 +347,30 @@ describe('CanonicalTransactionChain', () => { } }) + it('should return true for valid element from a l1ToL2Batch', async () => { + const l1ToL2Batch = await enqueueAndGenerateBatch(DEFAULT_TX) + await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() + const localBatch = new DefaultRollupBatch( + l1ToL2Batch.timestamp, //timestamp + true, //isL1ToL2Tx + 0, //batchIndex + 0, //cumulativePrevElements + [DEFAULT_TX] //batch + ) + await localBatch.generateTree() + const elementIndex = 0 + const position = localBatch.getPosition(elementIndex) + const elementInclusionProof = await localBatch.getElementInclusionProof( + elementIndex + ) + const isIncluded = await canonicalTxChain.verifyElement( + DEFAULT_TX, // element + position, + elementInclusionProof + ) + isIncluded.should.equal(true) + }) + it('should return false for wrong position with wrong indexInBatch', async () => { const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] const localBatch = await appendAndGenerateBatch(batch) diff --git a/packages/rollup-contracts/test/rollup-list/RLhelper.ts b/packages/rollup-contracts/test/rollup-list/RLhelper.ts index 6cf2ff42238e5..68a81476ffc56 100644 --- a/packages/rollup-contracts/test/rollup-list/RLhelper.ts +++ b/packages/rollup-contracts/test/rollup-list/RLhelper.ts @@ -12,7 +12,7 @@ import { utils } from 'ethers' interface TxChainBatchHeader { timestamp: number - isL1ToL2: boolean + isL1ToL2Tx: boolean elementsMerkleRoot: string numElementsInBatch: number cumulativePrevElements: number @@ -32,7 +32,7 @@ interface ElementInclusionProof { */ export class DefaultRollupBatch { public timestamp: number - public isL1ToL2: boolean + public isL1ToL2Tx: boolean public batchIndex: number //index in public cumulativePrevElements: number //in batchHeader public elements: string[] //Rollup batch @@ -40,12 +40,12 @@ export class DefaultRollupBatch { constructor( timestamp: number, // Ethereum batch this batch was submitted in - isL1ToL2: boolean, + isL1ToL2Tx: boolean, batchIndex: number, // index in batchs array (first batch has batchIndex of 0) cumulativePrevElements: number, elements: string[] ) { - this.isL1ToL2 = isL1ToL2 + this.isL1ToL2Tx = isL1ToL2Tx this.timestamp = timestamp this.batchIndex = batchIndex this.cumulativePrevElements = cumulativePrevElements @@ -99,7 +99,7 @@ export class DefaultRollupBatch { ['uint', 'bool', 'bytes32', 'uint', 'uint'], [ this.timestamp, - this.isL1ToL2, + this.isL1ToL2Tx, bufToHexString(bufferRoot), this.elements.length, this.cumulativePrevElements, @@ -120,7 +120,7 @@ export class DefaultRollupBatch { batchIndex: this.batchIndex, batchHeader: { timestamp: this.timestamp, - isL1ToL2: this.isL1ToL2, + isL1ToL2Tx: this.isL1ToL2Tx, elementsMerkleRoot: bufToHexString(bufferRoot), numElementsInBatch: this.elements.length, 
cumulativePrevElements: this.cumulativePrevElements, From 534bc7ab4c1fc7251f697f96fad3172600919e60 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Fri, 22 May 2020 16:13:57 -0400 Subject: [PATCH 24/37] add safetyTxQueue --- .../contracts/SafetyTransactionQueue.sol | 20 ++++++ .../SafetyTransactionQueue.spec.ts | 70 +++++++++++++++++++ 2 files changed, 90 insertions(+) create mode 100644 packages/rollup-contracts/contracts/SafetyTransactionQueue.sol create mode 100644 packages/rollup-contracts/test/rollup-list/SafetyTransactionQueue.spec.ts diff --git a/packages/rollup-contracts/contracts/SafetyTransactionQueue.sol b/packages/rollup-contracts/contracts/SafetyTransactionQueue.sol new file mode 100644 index 0000000000000..3e684a95f0c4d --- /dev/null +++ b/packages/rollup-contracts/contracts/SafetyTransactionQueue.sol @@ -0,0 +1,20 @@ +pragma solidity ^0.5.0; +pragma experimental ABIEncoderV2; + +/* Internal Imports */ +import {RollupQueue} from "./RollupQueue.sol"; + +contract SafetyTransactionQueue is RollupQueue { + address public canonicalTransactionChain; + + constructor( + address _rollupMerkleUtilsAddress, + address _canonicalTransactionChain + ) RollupQueue(_rollupMerkleUtilsAddress) public { + canonicalTransactionChain = _canonicalTransactionChain; + } + + function authenticateDequeue(address _sender) public view returns (bool) { + return _sender == canonicalTransactionChain; + } +} diff --git a/packages/rollup-contracts/test/rollup-list/SafetyTransactionQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/SafetyTransactionQueue.spec.ts new file mode 100644 index 0000000000000..b474c44e97312 --- /dev/null +++ b/packages/rollup-contracts/test/rollup-list/SafetyTransactionQueue.spec.ts @@ -0,0 +1,70 @@ +import '../setup' + +/* External Imports */ +import { getLogger } from '@eth-optimism/core-utils' +import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' + +/* Logging */ +const log = getLogger('safety-tx-queue', true) + +/* Contract Imports */ +import * as SafetyTransactionQueue from '../../build/SafetyTransactionQueue.json' +import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' + +describe('SafetyTransactionQueue', () => { + const provider = createMockProvider() + const [wallet, canonicalTransactionChain, randomWallet] = getWallets(provider) + const defaultTx = '0x1234' + let safetyTxQueue + let rollupMerkleUtils + + /* Link libraries before tests */ + before(async () => { + rollupMerkleUtils = await deployContract(wallet, RollupMerkleUtils, [], { + gasLimit: 6700000, + }) + }) + + beforeEach(async () => { + safetyTxQueue = await deployContract( + wallet, + SafetyTransactionQueue, + [rollupMerkleUtils.address, canonicalTransactionChain.address], + { + gasLimit: 6700000, + } + ) + }) + + describe('enqueueBatch() ', async () => { + it('should allow enqueue from any address', async () => { + await safetyTxQueue.connect(randomWallet).enqueueTx(defaultTx) + const batchesLength = await safetyTxQueue.getBatchHeadersLength() + batchesLength.should.equal(1) + }) + }) + + describe('dequeue() ', async () => { + it('should allow dequeue from canonicalTransactionChain', async () => { + await safetyTxQueue.enqueueTx(defaultTx) + await safetyTxQueue.connect(canonicalTransactionChain).dequeue() + const batchesLength = await safetyTxQueue.getBatchHeadersLength() + batchesLength.should.equal(1) + const { txHash, timestamp } = await safetyTxQueue.batchHeaders(0) + txHash.should.equal( + '0x0000000000000000000000000000000000000000000000000000000000000000' + 
) + timestamp.should.equal(0) + const front = await safetyTxQueue.front() + front.should.equal(1) + }) + it('should not allow dequeue from other address', async () => { + await safetyTxQueue.enqueueTx(defaultTx) + await safetyTxQueue + .dequeue() + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Message sender does not have permission to dequeue' + ) + }) + }) +}) From 49d45231e623a2fe5fdfe6cfc3422611e0432f8b Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Fri, 22 May 2020 17:05:17 -0400 Subject: [PATCH 25/37] add appendSafetyBatch and tests --- .../contracts/CanonicalTransactionChain.sol | 17 ++- .../CanonicalTransactionChain.spec.ts | 105 +++++++++++++++++- 2 files changed, 114 insertions(+), 8 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index 8fd3058302c9d..26ea90ece67d5 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -5,12 +5,14 @@ pragma experimental ABIEncoderV2; import {DataTypes as dt} from "./DataTypes.sol"; import {RollupMerkleUtils} from "./RollupMerkleUtils.sol"; import {L1ToL2TransactionQueue} from "./L1ToL2TransactionQueue.sol"; +import {SafetyTransactionQueue} from "./SafetyTransactionQueue.sol"; contract CanonicalTransactionChain { address public sequencer; uint public sequencerLivenessAssumption; RollupMerkleUtils public merkleUtils; L1ToL2TransactionQueue public l1ToL2Queue; + SafetyTransactionQueue public safetyQueue; uint public cumulativeNumElements; bytes32[] public batches; uint public lastOVMTimestamp; @@ -24,6 +26,7 @@ contract CanonicalTransactionChain { merkleUtils = RollupMerkleUtils(_rollupMerkleUtilsAddress); sequencer = _sequencer; l1ToL2Queue = new L1ToL2TransactionQueue(_rollupMerkleUtilsAddress, _l1ToL2TransactionPasserAddress, address(this)); + safetyQueue = new SafetyTransactionQueue(_rollupMerkleUtilsAddress, address(this)); sequencerLivenessAssumption =_sequencerLivenessAssumption; lastOVMTimestamp = 0; } @@ -50,6 +53,17 @@ contract CanonicalTransactionChain { function appendL1ToL2Batch() public { dt.TimestampedHash memory timestampedHash = l1ToL2Queue.peek(); + _appendQueueBatch(timestampedHash, true); + l1ToL2Queue.dequeue(); + } + + function appendSafetyBatch() public { + dt.TimestampedHash memory timestampedHash = safetyQueue.peek(); + _appendQueueBatch(timestampedHash, false); + safetyQueue.dequeue(); + } + + function _appendQueueBatch(dt.TimestampedHash memory timestampedHash, bool isL1ToL2Tx) internal { uint timestamp = timestampedHash.timestamp; if (timestamp + sequencerLivenessAssumption > now) { require(authenticateAppend(msg.sender), "Message sender does not have permission to append this batch"); @@ -59,14 +73,13 @@ contract CanonicalTransactionChain { uint numElementsInBatch = 1; bytes32 batchHeaderHash = keccak256(abi.encodePacked( timestamp, - true, // isL1ToL2Tx + isL1ToL2Tx, // isL1ToL2Tx elementsMerkleRoot, numElementsInBatch, cumulativeNumElements // cumulativePrevElements )); batches.push(batchHeaderHash); cumulativeNumElements += numElementsInBatch; - l1ToL2Queue.dequeue(); } function appendTransactionBatch(bytes[] memory _txBatch, uint _timestamp) public { diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index bf9f272482357..c03a96785f449 100644 --- 
a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -14,6 +14,7 @@ const log = getLogger('canonical-tx-chain', true) /* Contract Imports */ import * as CanonicalTransactionChain from '../../build/CanonicalTransactionChain.json' import * as L1ToL2TransactionQueue from '../../build/L1ToL2TransactionQueue.json' +import * as SafetyTransactionQueue from '../../build/SafetyTransactionQueue.json' import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' /* Begin tests */ @@ -24,11 +25,14 @@ describe('CanonicalTransactionChain', () => { sequencer, canonicalTransactionChain, l1ToL2TransactionPasser, + randomWallet, ] = getWallets(provider) let canonicalTxChain let rollupMerkleUtils let l1ToL2Queue + let safetyQueue const localL1ToL2Queue = [] + const localSafetyQueue = [] const LIVENESS_ASSUMPTION = 600 //600 seconds = 10 minutes const DEFAULT_BATCH = ['0x1234', '0x5678'] const DEFAULT_TX = '0x1234' @@ -60,14 +64,29 @@ describe('CanonicalTransactionChain', () => { return localBatch } - const enqueueAndGenerateBatch = async ( + const enqueueAndGenerateL1ToL2Batch = async ( _tx: string ): Promise => { // Submit the rollup batch on-chain const enqueueTx = await l1ToL2Queue .connect(l1ToL2TransactionPasser) .enqueueTx(_tx) - const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) + const localBatch = await generateQueueBatch(_tx, enqueueTx.hash) + return localBatch + } + const enqueueAndGenerateSafetyBatch = async ( + _tx: string + ): Promise => { + const enqueueTx = await safetyQueue.connect(randomWallet).enqueueTx(_tx) + const localBatch = await generateQueueBatch(_tx, enqueueTx.hash) + return localBatch + } + + const generateQueueBatch = async ( + _tx: string, + _txHash: string + ): Promise => { + const txReceipt = await provider.getTransactionReceipt(_txHash) const timestamp = (await provider.getBlock(txReceipt.blockNumber)).timestamp // Generate a local version of the rollup batch const localBatch = new RollupQueueBatch(_tx, timestamp) @@ -97,12 +116,20 @@ describe('CanonicalTransactionChain', () => { gasLimit: 6700000, } ) + const l1ToL2QueueAddress = await canonicalTxChain.l1ToL2Queue() l1ToL2Queue = new Contract( l1ToL2QueueAddress, L1ToL2TransactionQueue.abi, provider ) + + const safetyQueueAddress = await canonicalTxChain.safetyQueue() + safetyQueue = new Contract( + safetyQueueAddress, + SafetyTransactionQueue.abi, + provider + ) }) describe('appendTransactionBatch()', async () => { @@ -117,7 +144,7 @@ describe('CanonicalTransactionChain', () => { ) }) - it('should rever if submitting a 10 minute old batch', async () => { + it('should revert if submitting a 10 minute old batch', async () => { const timestamp = Math.floor(Date.now() / 1000) const oldTimestamp = timestamp - (LIVENESS_ASSUMPTION + 1) await canonicalTxChain @@ -214,7 +241,7 @@ describe('CanonicalTransactionChain', () => { describe('when the l1ToL2Queue is not empty', async () => { let localBatch beforeEach(async () => { - localBatch = await enqueueAndGenerateBatch(DEFAULT_TX) + localBatch = await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) }) it('should succesfully append a batch with an older timestamp', async () => { @@ -245,7 +272,7 @@ describe('CanonicalTransactionChain', () => { describe('appendL1ToL2Batch()', async () => { describe('when there is a batch in the L1toL2Queue', async () => { beforeEach(async () => { - const localBatch = await enqueueAndGenerateBatch(DEFAULT_TX) + const 
localBatch = await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) localL1ToL2Queue.push(localBatch) }) @@ -308,6 +335,72 @@ describe('CanonicalTransactionChain', () => { }) }) + describe('appendSafetyBatch()', async () => { + describe('when there is a batch in the SafetyQueue', async () => { + beforeEach(async () => { + const localBatch = await enqueueAndGenerateSafetyBatch(DEFAULT_TX) + localSafetyQueue.push(localBatch) + }) + + it('should successfully dequeue a SafetyBatch', async () => { + await canonicalTxChain.connect(sequencer).appendSafetyBatch() + const front = await safetyQueue.front() + front.should.equal(1) + const { timestamp, txHash } = await safetyQueue.batchHeaders(0) + timestamp.should.equal(0) + txHash.should.equal( + '0x0000000000000000000000000000000000000000000000000000000000000000' + ) + }) + + it('should successfully append a SafetyBatch', async () => { + const { timestamp, txHash } = await safetyQueue.batchHeaders(0) + const localBatch = new DefaultRollupBatch( + timestamp, + false, // isL1ToL2Tx + 0, //batchIndex + 0, // cumulativePrevElements + [DEFAULT_TX] // elements + ) + await localBatch.generateTree() + const localBatchHeaderHash = await localBatch.hashBatchHeader() + await canonicalTxChain.connect(sequencer).appendSafetyBatch() + const batchHeaderHash = await canonicalTxChain.batches(0) + batchHeaderHash.should.equal(localBatchHeaderHash) + }) + + it('should not allow non-sequencer to appendSafetyBatch if less than 10 minutes old', async () => { + await canonicalTxChain + .appendSafetyBatch() + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Message sender does not have permission to append this batch' + ) + }) + + describe('after 10 minutes have elapsed', async () => { + let snapshotID + beforeEach(async () => { + snapshotID = await provider.send('evm_snapshot', []) + await provider.send('evm_increaseTime', [LIVENESS_ASSUMPTION]) + }) + afterEach(async () => { + await provider.send('evm_revert', [snapshotID]) + }) + it('should allow non-sequencer to appendSafetyBatch', async () => { + await canonicalTxChain.appendSafetyBatch() + }) + }) + }) + + it('should revert when SafetyTxQueue is empty', async () => { + await canonicalTxChain + .appendSafetyBatch() + .should.be.revertedWith( + 'VM Exception while processing transaction: revert Queue is empty, no element to peek at' + ) + }) + }) + describe('verifyElement() ', async () => { it('should return true for valid elements for different batches and elements', async () => { const numBatches = 3 @@ -348,7 +441,7 @@ describe('CanonicalTransactionChain', () => { }) it('should return true for valid element from a l1ToL2Batch', async () => { - const l1ToL2Batch = await enqueueAndGenerateBatch(DEFAULT_TX) + const l1ToL2Batch = await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() const localBatch = new DefaultRollupBatch( l1ToL2Batch.timestamp, //timestamp From b3bd0c683f69e2df3144442157260597d12946ef Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Fri, 22 May 2020 17:16:35 -0400 Subject: [PATCH 26/37] add verifyElement test for SafetyBatch --- .../CanonicalTransactionChain.spec.ts | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index c03a96785f449..e0b5c8175aedc 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ 
b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -464,6 +464,30 @@ describe('CanonicalTransactionChain', () => { isIncluded.should.equal(true) }) + it('should return true for valid element from a SafetyBatch', async () => { + const l1ToL2Batch = await enqueueAndGenerateSafetyBatch(DEFAULT_TX) + await canonicalTxChain.connect(sequencer).appendSafetyBatch() + const localBatch = new DefaultRollupBatch( + l1ToL2Batch.timestamp, //timestamp + false, //isL1ToL2Tx + 0, //batchIndex + 0, //cumulativePrevElements + [DEFAULT_TX] //batch + ) + await localBatch.generateTree() + const elementIndex = 0 + const position = localBatch.getPosition(elementIndex) + const elementInclusionProof = await localBatch.getElementInclusionProof( + elementIndex + ) + const isIncluded = await canonicalTxChain.verifyElement( + DEFAULT_TX, // element + position, + elementInclusionProof + ) + isIncluded.should.equal(true) + }) + it('should return false for wrong position with wrong indexInBatch', async () => { const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] const localBatch = await appendAndGenerateBatch(batch) From 7e141310cf4eb2a1211d87883f0fee68b947aeea Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Fri, 22 May 2020 20:27:09 -0400 Subject: [PATCH 27/37] fix timestamp test --- .../contracts/CanonicalTransactionChain.sol | 24 ++++++++++++++----- .../CanonicalTransactionChain.spec.ts | 7 ++++-- 2 files changed, 23 insertions(+), 8 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index 26ea90ece67d5..f70103b08c7d8 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -52,18 +52,27 @@ contract CanonicalTransactionChain { } function appendL1ToL2Batch() public { - dt.TimestampedHash memory timestampedHash = l1ToL2Queue.peek(); - _appendQueueBatch(timestampedHash, true); + dt.TimestampedHash memory l1ToL2Header = l1ToL2Queue.peek(); + if(!safetyQueue.isEmpty()) { + require(l1ToL2Header.timestamp <= safetyQueue.peekTimestamp(), "Must process older SafetyQueue batches first to enforce timestamp monotonicity"); + } + _appendQueueBatch(l1ToL2Header, true); l1ToL2Queue.dequeue(); } function appendSafetyBatch() public { - dt.TimestampedHash memory timestampedHash = safetyQueue.peek(); - _appendQueueBatch(timestampedHash, false); + dt.TimestampedHash memory safetyHeader = safetyQueue.peek(); + if(!l1ToL2Queue.isEmpty()) { + require(safetyHeader.timestamp <= l1ToL2Queue.peekTimestamp(), "Must process older L1ToL2Queue batches first to enforce timestamp monotonicity"); + } + _appendQueueBatch(safetyHeader, false); safetyQueue.dequeue(); } - function _appendQueueBatch(dt.TimestampedHash memory timestampedHash, bool isL1ToL2Tx) internal { + function _appendQueueBatch( + dt.TimestampedHash memory timestampedHash, + bool isL1ToL2Tx + ) internal { uint timestamp = timestampedHash.timestamp; if (timestamp + sequencerLivenessAssumption > now) { require(authenticateAppend(msg.sender), "Message sender does not have permission to append this batch"); @@ -88,7 +97,10 @@ contract CanonicalTransactionChain { require(_timestamp + sequencerLivenessAssumption > now, "Cannot submit a batch with a timestamp older than the sequencer liveness assumption"); require(_timestamp <= now, "Cannot submit a batch with a timestamp in the future"); if(!l1ToL2Queue.isEmpty()) { - require(_timestamp <= 
l1ToL2Queue.peekTimestamp(), "Must process older queued batches first to enforce timestamp monotonicity"); + require(_timestamp <= l1ToL2Queue.peekTimestamp(), "Must process older L1ToL2Queue batches first to enforce timestamp monotonicity"); + } + if(!safetyQueue.isEmpty()) { + require(_timestamp <= safetyQueue.peekTimestamp(), "Must process older SafetyQueue batches first to enforce timestamp monotonicity"); } require(_timestamp >= lastOVMTimestamp, "Timestamps must monotonically increase"); lastOVMTimestamp = _timestamp; diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index e0b5c8175aedc..8a5a7a3399bb0 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -257,14 +257,17 @@ describe('CanonicalTransactionChain', () => { .appendTransactionBatch(DEFAULT_BATCH, localBatch.timestamp) }) - it('should revert when appending a block with a newer timestamp', async () => { + it('should revert when there is an older batch in the L1ToL2Queue', async () => { + const snapshotID = await provider.send('evm_snapshot', []) + await provider.send('evm_increaseTime', [LIVENESS_ASSUMPTION]) const newTimestamp = localBatch.timestamp + 1 await canonicalTxChain .connect(sequencer) .appendTransactionBatch(DEFAULT_BATCH, newTimestamp) .should.be.revertedWith( - 'VM Exception while processing transaction: revert Cannot submit a batch with a timestamp in the future' + 'VM Exception while processing transaction: revert Must process older L1ToL2Queue batches first to enforce timestamp monotonicity' ) + await provider.send('evm_revert', [snapshotID]) }) }) }) From 74caa381735318cfece3f4bd5c1474d67cd11bbb Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Sat, 23 May 2020 00:12:03 -0400 Subject: [PATCH 28/37] use TestUtils to assert revert messages --- .../CanonicalTransactionChain.spec.ts | 33 ++++++++++--------- .../SafetyTransactionQueue.spec.ts | 13 ++++---- 2 files changed, 25 insertions(+), 21 deletions(-) diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index 04cc4337bf16b..851c36de1aeef 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -343,11 +343,12 @@ describe('CanonicalTransactionChain', () => { }) it('should revert when L1ToL2TxQueue is empty', async () => { - await canonicalTxChain - .appendL1ToL2Batch() - .should.be.revertedWith( - 'VM Exception while processing transaction: revert Queue is empty, no element to peek at' - ) + await TestUtils.assertRevertsAsync( + 'Queue is empty, no element to peek at', + async () => { + await canonicalTxChain.appendL1ToL2Batch() + } + ) }) }) @@ -386,11 +387,12 @@ describe('CanonicalTransactionChain', () => { }) it('should not allow non-sequencer to appendSafetyBatch if less than 10 minutes old', async () => { - await canonicalTxChain - .appendSafetyBatch() - .should.be.revertedWith( - 'VM Exception while processing transaction: revert Message sender does not have permission to append this batch' - ) + await TestUtils.assertRevertsAsync( + 'Message sender does not have permission to append this batch', + async () => { + await canonicalTxChain.appendSafetyBatch() + } + ) }) describe('after 10 
minutes have elapsed', async () => { @@ -409,11 +411,12 @@ describe('CanonicalTransactionChain', () => { }) it('should revert when SafetyTxQueue is empty', async () => { - await canonicalTxChain - .appendSafetyBatch() - .should.be.revertedWith( - 'VM Exception while processing transaction: revert Queue is empty, no element to peek at' - ) + await TestUtils.assertRevertsAsync( + 'Queue is empty, no element to peek at', + async () => { + await canonicalTxChain.appendSafetyBatch() + } + ) }) }) diff --git a/packages/rollup-contracts/test/rollup-list/SafetyTransactionQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/SafetyTransactionQueue.spec.ts index b474c44e97312..62a4d38a0a895 100644 --- a/packages/rollup-contracts/test/rollup-list/SafetyTransactionQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/SafetyTransactionQueue.spec.ts @@ -1,7 +1,7 @@ import '../setup' /* External Imports */ -import { getLogger } from '@eth-optimism/core-utils' +import { getLogger, TestUtils } from '@eth-optimism/core-utils' import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' /* Logging */ @@ -60,11 +60,12 @@ describe('SafetyTransactionQueue', () => { }) it('should not allow dequeue from other address', async () => { await safetyTxQueue.enqueueTx(defaultTx) - await safetyTxQueue - .dequeue() - .should.be.revertedWith( - 'VM Exception while processing transaction: revert Message sender does not have permission to dequeue' - ) + await TestUtils.assertRevertsAsync( + 'Message sender does not have permission to dequeue', + async () => { + await safetyTxQueue.dequeue() + } + ) }) }) }) From b3970ba8772fde8fb44c3d49263f796db59f0935 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Mon, 25 May 2020 11:29:37 -0400 Subject: [PATCH 29/37] added timestamp unit tests --- .../CanonicalTransactionChain.spec.ts | 142 ++++++++++++++---- 1 file changed, 115 insertions(+), 27 deletions(-) diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index 851c36de1aeef..3e46bf889f9c1 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -248,7 +248,8 @@ describe('CanonicalTransactionChain', () => { const batchesLength = await canonicalTxChain.getBatchesLength() batchesLength.toNumber().should.equal(numBatchs) }) - describe('when the l1ToL2Queue is not empty', async () => { + + describe('when there is a batch in the L1toL2Queue', async () => { let localBatch beforeEach(async () => { localBatch = await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) @@ -268,7 +269,7 @@ describe('CanonicalTransactionChain', () => { }) it('should revert when there is an older batch in the L1ToL2Queue', async () => { - const snapshot = await provider.send('evm_snapshot', []) + const snapshotID = await provider.send('evm_snapshot', []) await provider.send('evm_increaseTime', [LIVENESS_ASSUMPTION]) const newTimestamp = localBatch.timestamp + 1 await TestUtils.assertRevertsAsync( @@ -279,7 +280,63 @@ describe('CanonicalTransactionChain', () => { .appendTransactionBatch(DEFAULT_BATCH, newTimestamp) } ) - await provider.send('evm_revert', [snapshot]) + await provider.send('evm_revert', [snapshotID]) + }) + }) + + describe('when there is a batch in the SafetyQueue', async () => { + let localBatch + beforeEach(async () => { + localBatch = await enqueueAndGenerateSafetyBatch(DEFAULT_TX) + }) + + 
it('should succesfully append a batch with an older timestamp', async () => { + const oldTimestamp = localBatch.timestamp - 1 + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(DEFAULT_BATCH, oldTimestamp) + }) + + it('should succesfully append a batch with an equal timestamp', async () => { + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(DEFAULT_BATCH, localBatch.timestamp) + }) + + it('should revert when there is an older batch in the SafetyQueue', async () => { + const snapshotID = await provider.send('evm_snapshot', []) + await provider.send('evm_increaseTime', [LIVENESS_ASSUMPTION]) + const newTimestamp = localBatch.timestamp + 1 + await TestUtils.assertRevertsAsync( + 'Must process older SafetyQueue batches first to enforce timestamp monotonicity', + async () => { + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(DEFAULT_BATCH, newTimestamp) + } + ) + await provider.send('evm_revert', [snapshotID]) + }) + }) + describe('when both the safetyQueue and l1ToL2Queue are not empty', async () => { + let timestamp + beforeEach(async () => { + const localBatch = await enqueueAndGenerateSafetyBatch(DEFAULT_TX) + timestamp = localBatch.timestamp + await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) + }) + + it('should succesfully append a batch with an older timestamp than the oldest batch', async () => { + const oldTimestamp = timestamp - 1 + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(DEFAULT_BATCH, oldTimestamp) + }) + + it('should succesfully append a batch with a timestamp equal to the oldest batch', async () => { + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(DEFAULT_BATCH, timestamp) }) }) }) @@ -327,21 +384,37 @@ describe('CanonicalTransactionChain', () => { ) }) - describe('after inclusion period has elapsed', async () => { - let snapshotID - beforeEach(async () => { - snapshotID = await provider.send('evm_snapshot', []) - await provider.send('evm_increaseTime', [LIVENESS_ASSUMPTION]) - }) - afterEach(async () => { - await provider.send('evm_revert', [snapshotID]) - }) - it('should allow non-sequencer to appendL1ToL2Batch', async () => { - await canonicalTxChain.appendL1ToL2Batch() - }) + it('should allow non-sequencer to appendL1ToL2Batch after inclusion period has elapsed', async () => { + const snapshotID = await provider.send('evm_snapshot', []) + await provider.send('evm_increaseTime', [LIVENESS_ASSUMPTION]) + await canonicalTxChain.appendL1ToL2Batch() + await provider.send('evm_revert', [snapshotID]) }) }) + it('should revert when there is an older batch in the SafetyQueue ', async () => { + const snapshotID = await provider.send('evm_snapshot', []) + await enqueueAndGenerateSafetyBatch(DEFAULT_TX) + await provider.send('evm_increaseTime', [10]) + await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) + await TestUtils.assertRevertsAsync( + 'Must process older SafetyQueue batches first to enforce timestamp monotonicity', + async () => { + await canonicalTxChain.appendL1ToL2Batch() + } + ) + await provider.send('evm_revert', [snapshotID]) + }) + + it('should succeed when there are only newer batches in the SafetyQueue ', async () => { + const snapshotID = await provider.send('evm_snapshot', []) + await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) + await provider.send('evm_increaseTime', [10]) + await enqueueAndGenerateSafetyBatch(DEFAULT_TX) + await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() + await provider.send('evm_revert', [snapshotID]) + }) + it('should 
revert when L1ToL2TxQueue is empty', async () => { await TestUtils.assertRevertsAsync( 'Queue is empty, no element to peek at', @@ -394,20 +467,35 @@ describe('CanonicalTransactionChain', () => { } ) }) + it('should allow non-sequencer to appendSafetyBatch after 10 minutes have elapsed', async () => { + const snapshotID = await provider.send('evm_snapshot', []) + await provider.send('evm_increaseTime', [LIVENESS_ASSUMPTION]) + await canonicalTxChain.appendSafetyBatch() + await provider.send('evm_revert', [snapshotID]) + }) + }) - describe('after 10 minutes have elapsed', async () => { - let snapshotID - beforeEach(async () => { - snapshotID = await provider.send('evm_snapshot', []) - await provider.send('evm_increaseTime', [LIVENESS_ASSUMPTION]) - }) - afterEach(async () => { - await provider.send('evm_revert', [snapshotID]) - }) - it('should allow non-sequencer to appendSafetyBatch', async () => { + it('should revert when trying to appendSafetyBatch when there is an older batch in the L1ToL2Queue ', async () => { + const snapshotID = await provider.send('evm_snapshot', []) + await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) + await provider.send('evm_increaseTime', [10]) + await enqueueAndGenerateSafetyBatch(DEFAULT_TX) + await TestUtils.assertRevertsAsync( + 'Must process older L1ToL2Queue batches first to enforce timestamp monotonicity', + async () => { await canonicalTxChain.appendSafetyBatch() - }) - }) + } + ) + await provider.send('evm_revert', [snapshotID]) + }) + + it('should succeed when there are only newer batches in the L1ToL2Queue ', async () => { + const snapshotID = await provider.send('evm_snapshot', []) + await enqueueAndGenerateSafetyBatch(DEFAULT_TX) + await provider.send('evm_increaseTime', [10]) + await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) + await canonicalTxChain.connect(sequencer).appendSafetyBatch() + await provider.send('evm_revert', [snapshotID]) }) it('should revert when SafetyTxQueue is empty', async () => { From f3208431e66c251d45d7fc8a613b5771819db77c Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Mon, 25 May 2020 11:47:57 -0400 Subject: [PATCH 30/37] clean up unit tests --- .../rollup-list/CanonicalTransactionChain.spec.ts | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index 3e46bf889f9c1..39803ff2a9d9b 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -31,8 +31,6 @@ describe('CanonicalTransactionChain', () => { let rollupMerkleUtils let l1ToL2Queue let safetyQueue - const localL1ToL2Queue = [] - const localSafetyQueue = [] const LIVENESS_ASSUMPTION = 600 //600 seconds = 10 minutes const DEFAULT_BATCH = ['0x1234', '0x5678'] const DEFAULT_TX = '0x1234' @@ -344,8 +342,7 @@ describe('CanonicalTransactionChain', () => { describe('appendL1ToL2Batch()', async () => { describe('when there is a batch in the L1toL2Queue', async () => { beforeEach(async () => { - const localBatch = await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) - localL1ToL2Queue.push(localBatch) + await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) }) it('should successfully dequeue a L1ToL2Batch', async () => { @@ -428,8 +425,7 @@ describe('CanonicalTransactionChain', () => { describe('appendSafetyBatch()', async () => { describe('when there is a batch in the SafetyQueue', async () => { 
beforeEach(async () => { - const localBatch = await enqueueAndGenerateSafetyBatch(DEFAULT_TX) - localSafetyQueue.push(localBatch) + await enqueueAndGenerateSafetyBatch(DEFAULT_TX) }) it('should successfully dequeue a SafetyBatch', async () => { @@ -572,10 +568,10 @@ describe('CanonicalTransactionChain', () => { }) it('should return true for valid element from a SafetyBatch', async () => { - const l1ToL2Batch = await enqueueAndGenerateSafetyBatch(DEFAULT_TX) + const safetyBatch = await enqueueAndGenerateSafetyBatch(DEFAULT_TX) await canonicalTxChain.connect(sequencer).appendSafetyBatch() const localBatch = new DefaultRollupBatch( - l1ToL2Batch.timestamp, //timestamp + safetyBatch.timestamp, //timestamp false, //isL1ToL2Tx 0, //batchIndex 0, //cumulativePrevElements From a8a6e5ef4866fe3171d683b3ae772b2a1441316f Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Thu, 28 May 2020 01:44:42 -0400 Subject: [PATCH 31/37] add state commitment chain, clean up canonical tx chain tests --- .../contracts/CanonicalTransactionChain.sol | 2 +- .../rollup-contracts/contracts/DataTypes.sol | 13 ++ .../contracts/StateCommitmentChain.sol | 70 +++++++++++ .../CanonicalTransactionChain.spec.ts | 38 +++--- .../test/rollup-list/RLhelper.ts | 103 ++++++++++++--- .../test/rollup-list/RollupQueue.spec.ts | 8 +- .../rollup-list/StateCommitmentChain.spec.ts | 119 ++++++++++++++++++ 7 files changed, 309 insertions(+), 44 deletions(-) create mode 100644 packages/rollup-contracts/contracts/StateCommitmentChain.sol create mode 100644 packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index 25328b798b2de..d29cfd49a8138 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -82,7 +82,7 @@ contract CanonicalTransactionChain { uint numElementsInBatch = 1; bytes32 batchHeaderHash = keccak256(abi.encodePacked( timestamp, - isL1ToL2Tx, // isL1ToL2Tx + isL1ToL2Tx, elementsMerkleRoot, numElementsInBatch, cumulativeNumElements // cumulativePrevElements diff --git a/packages/rollup-contracts/contracts/DataTypes.sol b/packages/rollup-contracts/contracts/DataTypes.sol index 2d8455c0fa55e..6048508aa7e3a 100644 --- a/packages/rollup-contracts/contracts/DataTypes.sol +++ b/packages/rollup-contracts/contracts/DataTypes.sol @@ -18,6 +18,19 @@ contract DataTypes { bytes32[] siblings; // used to verify inclusion of the element in elementsMerkleRoot } + struct StateElementInclusionProof { + uint batchIndex; // index in batches array (first batch has batchNumber of 0) + StateChainBatchHeader batchHeader; + uint indexInBatch; // used to verify inclusion of the element in elementsMerkleRoot + bytes32[] siblings; // used to verify inclusion of the element in elementsMerkleRoot + } + + struct StateChainBatchHeader { + bytes32 elementsMerkleRoot; + uint numElementsInBatch; + uint cumulativePrevElements; + } + struct TxChainBatchHeader { uint timestamp; bool isL1ToL2Tx; diff --git a/packages/rollup-contracts/contracts/StateCommitmentChain.sol b/packages/rollup-contracts/contracts/StateCommitmentChain.sol new file mode 100644 index 0000000000000..c77ee3e5e0603 --- /dev/null +++ b/packages/rollup-contracts/contracts/StateCommitmentChain.sol @@ -0,0 +1,70 @@ +pragma solidity ^0.5.0; +pragma experimental ABIEncoderV2; + +/* Internal Imports */ +import {DataTypes as dt} from "./DataTypes.sol"; 
+import {RollupMerkleUtils} from "./RollupMerkleUtils.sol"; + +contract StateCommitmentChain { + address public canonicalTransactionChain; + RollupMerkleUtils public merkleUtils; + uint public cumulativeNumElements; + bytes32[] public batches; + + constructor( + address _rollupMerkleUtilsAddress, + address _canonicalTransactionChain + ) public { + merkleUtils = RollupMerkleUtils(_rollupMerkleUtilsAddress); + canonicalTransactionChain = _canonicalTransactionChain; + } + + function getBatchesLength() public view returns (uint) { + return batches.length; + } + + function hashBatchHeader( + dt.TxChainBatchHeader memory _batchHeader + ) public pure returns (bytes32) { + return keccak256(abi.encodePacked( + _batchHeader.elementsMerkleRoot, + _batchHeader.numElementsInBatch, + _batchHeader.cumulativePrevElements + )); + } + + function appendStateBatch(bytes[] memory _stateBatch) public { + require(_stateBatch.length > 0, "Cannot submit an empty state commitment batch"); + // TODO Check that number of state commitments is less than or equal to num txs in canonical tx chain + bytes32 batchHeaderHash = keccak256(abi.encodePacked( + merkleUtils.getMerkleRoot(_stateBatch), // elementsMerkleRoot + _stateBatch.length, // numElementsInBatch + cumulativeNumElements // cumulativeNumElements + )); + batches.push(batchHeaderHash); + cumulativeNumElements += _stateBatch.length; + } + + // // verifies an element is in the current list at the given position + // function verifyElement( + // bytes memory _element, // the element of the list being proven + // uint _position, // the position in the list of the element being proven + // dt.StateElementInclusionProof memory _inclusionProof // inclusion proof in the rollup batch + // ) public view returns (bool) { + // // For convenience, store the batchHeader + // dt.StateChainBatchHeader memory batchHeader = _inclusionProof.batchHeader; + // // make sure absolute position equivalent to relative positions + // if(_position != _inclusionProof.indexInBatch + + // batchHeader.cumulativePrevElements) + // return false; + // // verify elementsMerkleRoot + // if (!merkleUtils.verify( + // batchHeader.elementsMerkleRoot, + // _element, + // _inclusionProof.indexInBatch, + // _inclusionProof.siblings + // )) return false; + // //compare computed batch header with the batch header in the list. 
+ // return hashBatchHeader(batchHeader) == batches[_inclusionProof.batchIndex]; + // } +} diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index 39803ff2a9d9b..bb3d43bd82c6c 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -6,7 +6,7 @@ import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' import { Contract } from 'ethers' /* Internal Imports */ -import { DefaultRollupBatch, RollupQueueBatch } from './RLhelper' +import { CanonicalTxBatch, TxQueueBatch } from './RLhelper' /* Logging */ const log = getLogger('canonical-tx-chain', true) @@ -18,15 +18,11 @@ import * as SafetyTransactionQueue from '../../build/SafetyTransactionQueue.json import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' /* Begin tests */ -describe('CanonicalTransactionChain', () => { +describe.only('CanonicalTransactionChain', () => { const provider = createMockProvider() - const [ - wallet, - sequencer, - canonicalTransactionChain, - l1ToL2TransactionPasser, - randomWallet, - ] = getWallets(provider) + const [wallet, sequencer, l1ToL2TransactionPasser, randomWallet] = getWallets( + provider + ) let canonicalTxChain let rollupMerkleUtils let l1ToL2Queue @@ -48,10 +44,10 @@ describe('CanonicalTransactionChain', () => { batch: string[], batchIndex: number = 0, cumulativePrevElements: number = 0 - ): Promise => { + ): Promise => { const timestamp = await appendBatch(batch) // Generate a local version of the rollup batch - const localBatch = new DefaultRollupBatch( + const localBatch = new CanonicalTxBatch( timestamp, false, batchIndex, @@ -64,7 +60,7 @@ describe('CanonicalTransactionChain', () => { const enqueueAndGenerateL1ToL2Batch = async ( _tx: string - ): Promise => { + ): Promise => { // Submit the rollup batch on-chain const enqueueTx = await l1ToL2Queue .connect(l1ToL2TransactionPasser) @@ -74,7 +70,7 @@ describe('CanonicalTransactionChain', () => { } const enqueueAndGenerateSafetyBatch = async ( _tx: string - ): Promise => { + ): Promise => { const enqueueTx = await safetyQueue.connect(randomWallet).enqueueTx(_tx) const localBatch = await generateQueueBatch(_tx, enqueueTx.hash) return localBatch @@ -83,11 +79,11 @@ describe('CanonicalTransactionChain', () => { const generateQueueBatch = async ( _tx: string, _txHash: string - ): Promise => { + ): Promise => { const txReceipt = await provider.getTransactionReceipt(_txHash) const timestamp = (await provider.getBlock(txReceipt.blockNumber)).timestamp // Generate a local version of the rollup batch - const localBatch = new RollupQueueBatch(_tx, timestamp) + const localBatch = new TxQueueBatch(_tx, timestamp) await localBatch.generateTree() return localBatch } @@ -269,7 +265,7 @@ describe('CanonicalTransactionChain', () => { it('should revert when there is an older batch in the L1ToL2Queue', async () => { const snapshotID = await provider.send('evm_snapshot', []) await provider.send('evm_increaseTime', [LIVENESS_ASSUMPTION]) - const newTimestamp = localBatch.timestamp + 1 + const newTimestamp = localBatch.timestamp + 60 await TestUtils.assertRevertsAsync( 'Must process older L1ToL2Queue batches first to enforce timestamp monotonicity', async () => { @@ -304,7 +300,7 @@ describe('CanonicalTransactionChain', () => { it('should revert when there is an older batch in the SafetyQueue', async 
() => { const snapshotID = await provider.send('evm_snapshot', []) await provider.send('evm_increaseTime', [LIVENESS_ASSUMPTION]) - const newTimestamp = localBatch.timestamp + 1 + const newTimestamp = localBatch.timestamp + 60 await TestUtils.assertRevertsAsync( 'Must process older SafetyQueue batches first to enforce timestamp monotonicity', async () => { @@ -358,7 +354,7 @@ describe('CanonicalTransactionChain', () => { it('should successfully append a L1ToL2Batch', async () => { const { timestamp, txHash } = await l1ToL2Queue.batchHeaders(0) - const localBatch = new DefaultRollupBatch( + const localBatch = new CanonicalTxBatch( timestamp, true, // isL1ToL2Tx 0, //batchIndex @@ -441,7 +437,7 @@ describe('CanonicalTransactionChain', () => { it('should successfully append a SafetyBatch', async () => { const { timestamp, txHash } = await safetyQueue.batchHeaders(0) - const localBatch = new DefaultRollupBatch( + const localBatch = new CanonicalTxBatch( timestamp, false, // isL1ToL2Tx 0, //batchIndex @@ -546,7 +542,7 @@ describe('CanonicalTransactionChain', () => { it('should return true for valid element from a l1ToL2Batch', async () => { const l1ToL2Batch = await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() - const localBatch = new DefaultRollupBatch( + const localBatch = new CanonicalTxBatch( l1ToL2Batch.timestamp, //timestamp true, //isL1ToL2Tx 0, //batchIndex @@ -570,7 +566,7 @@ describe('CanonicalTransactionChain', () => { it('should return true for valid element from a SafetyBatch', async () => { const safetyBatch = await enqueueAndGenerateSafetyBatch(DEFAULT_TX) await canonicalTxChain.connect(sequencer).appendSafetyBatch() - const localBatch = new DefaultRollupBatch( + const localBatch = new CanonicalTxBatch( safetyBatch.timestamp, //timestamp false, //isL1ToL2Tx 0, //batchIndex diff --git a/packages/rollup-contracts/test/rollup-list/RLhelper.ts b/packages/rollup-contracts/test/rollup-list/RLhelper.ts index 68a81476ffc56..8355f169a7f4e 100644 --- a/packages/rollup-contracts/test/rollup-list/RLhelper.ts +++ b/packages/rollup-contracts/test/rollup-list/RLhelper.ts @@ -18,35 +18,37 @@ interface TxChainBatchHeader { cumulativePrevElements: number } -interface ElementInclusionProof { +interface TxElementInclusionProof { batchIndex: number batchHeader: TxChainBatchHeader indexInBatch: number siblings: string[] } -/* - * Helper class which provides all information requried for a particular - * Rollup batch. This includes all of the transactions in readable form - * as well as the merkle tree which it generates. 
- */ -export class DefaultRollupBatch { - public timestamp: number - public isL1ToL2Tx: boolean +interface StateBatchHeader { + elementsMerkleRoot: string + numElementsInBatch: number + cumulativePrevElements: number +} + +interface StateElementInclusionProof { + batchIndex: number + batchHeader: StateBatchHeader + indexInBatch: number + siblings: string[] +} + +export class ChainBatch { public batchIndex: number //index in public cumulativePrevElements: number //in batchHeader public elements: string[] //Rollup batch public elementsMerkleTree: SparseMerkleTreeImpl constructor( - timestamp: number, // Ethereum batch this batch was submitted in - isL1ToL2Tx: boolean, batchIndex: number, // index in batchs array (first batch has batchIndex of 0) cumulativePrevElements: number, elements: string[] ) { - this.isL1ToL2Tx = isL1ToL2Tx - this.timestamp = timestamp this.batchIndex = batchIndex this.cumulativePrevElements = cumulativePrevElements this.elements = elements @@ -55,7 +57,6 @@ export class DefaultRollupBatch { * Generate the elements merkle tree from this.elements */ public async generateTree(): Promise { - // Create a tree! const treeHeight = Math.ceil(Math.log2(this.elements.length)) + 1 // The height should actually not be plus 1 this.elementsMerkleTree = await SparseMerkleTreeImpl.create( newInMemoryDB(), @@ -93,6 +94,52 @@ export class DefaultRollupBatch { return siblings } + public async hashBatchHeader(): Promise { + return '' + } + + /* + * elementIndex is the index in this batch of the element + * that we want to create an inclusion proof for. + */ + public async getElementInclusionProof( + elementIndex: number + ): Promise { + const bufferRoot = await this.elementsMerkleTree.getRootHash() + return { + batchIndex: this.batchIndex, + batchHeader: { + elementsMerkleRoot: bufToHexString(bufferRoot), + numElementsInBatch: this.elements.length, + cumulativePrevElements: this.cumulativePrevElements, + }, + indexInBatch: elementIndex, + siblings: await this.getSiblings(elementIndex), + } + } +} + +/* + * Helper class which provides all information requried for a particular + * Rollup batch. This includes all of the transactions in readable form + * as well as the merkle tree which it generates. + */ +export class CanonicalTxBatch extends ChainBatch { + public timestamp: number + public isL1ToL2Tx: boolean + + constructor( + timestamp: number, // Ethereum batch this batch was submitted in + isL1ToL2Tx: boolean, + batchIndex: number, // index in batchs array (first batch has batchIndex of 0) + cumulativePrevElements: number, + elements: string[] + ) { + super(batchIndex, cumulativePrevElements, elements) + this.isL1ToL2Tx = isL1ToL2Tx + this.timestamp = timestamp + } + public async hashBatchHeader(): Promise { const bufferRoot = await this.elementsMerkleTree.getRootHash() return utils.solidityKeccak256( @@ -111,10 +158,9 @@ export class DefaultRollupBatch { * elementIndex is the index in this batch of the element * that we want to create an inclusion proof for. 
*/ - public async getElementInclusionProof( elementIndex: number - ): Promise { + ): Promise { const bufferRoot = await this.elementsMerkleTree.getRootHash() return { batchIndex: this.batchIndex, @@ -130,12 +176,35 @@ export class DefaultRollupBatch { } } } + +export class StateChainBatch extends ChainBatch { + constructor( + batchIndex: number, // index in batchs array (first batch has batchIndex of 0) + cumulativePrevElements: number, + elements: string[] + ) { + super(batchIndex, cumulativePrevElements, elements) + } + + public async hashBatchHeader(): Promise { + const bufferRoot = await this.elementsMerkleTree.getRootHash() + return utils.solidityKeccak256( + ['bytes32', 'uint', 'uint'], + [ + bufToHexString(bufferRoot), + this.elements.length, + this.cumulativePrevElements, + ] + ) + } +} + /* * Helper class which provides all information requried for a particular * Rollup Queue Batch. This includes all of the transactions in readable form * as well as the merkle tree which it generates. */ -export class RollupQueueBatch { +export class TxQueueBatch { public elements: string[] public elementsMerkleTree: SparseMerkleTreeImpl public timestamp: number diff --git a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts index 202a5ce9840c8..11ea6821acbd7 100644 --- a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts @@ -5,7 +5,7 @@ import { getLogger, TestUtils } from '@eth-optimism/core-utils' import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' /* Internal Imports */ -import { RollupQueueBatch } from './RLhelper' +import { TxQueueBatch } from './RLhelper' /* Logging */ const log = getLogger('rollup-queue', true) @@ -38,15 +38,13 @@ describe('RollupQueue', () => { ) }) - const enqueueAndGenerateBatch = async ( - tx: string - ): Promise => { + const enqueueAndGenerateBatch = async (tx: string): Promise => { // Submit the rollup batch on-chain const enqueueTx = await rollupQueue.enqueueTx(tx) const txReceipt = await provider.getTransactionReceipt(enqueueTx.hash) const timestamp = (await provider.getBlock(txReceipt.blockNumber)).timestamp // Generate a local version of the rollup batch - const localBatch = new RollupQueueBatch(tx, timestamp) + const localBatch = new TxQueueBatch(tx, timestamp) await localBatch.generateTree() return localBatch } diff --git a/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts b/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts new file mode 100644 index 0000000000000..f63712ec6c851 --- /dev/null +++ b/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts @@ -0,0 +1,119 @@ +import '../setup' + +/* External Imports */ +import { getLogger, TestUtils } from '@eth-optimism/core-utils' +import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' +import { Contract } from 'ethers' + +/* Internal Imports */ +import { StateChainBatch } from './RLhelper' + +/* Logging */ +const log = getLogger('state-commitment-chain', true) + +/* Contract Imports */ +import * as StateCommitmentChain from '../../build/StateCommitmentChain.json' +import * as L1ToL2TransactionQueue from '../../build/L1ToL2TransactionQueue.json' +import * as SafetyTransactionQueue from '../../build/SafetyTransactionQueue.json' +import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' + +/* Begin tests */ 
+describe('StateCommitmentChain', () => { + const provider = createMockProvider() + const [wallet, canonicalTransactionChain, randomWallet] = getWallets(provider) + let stateChain + let rollupMerkleUtils + const DEFAULT_BATCH = ['0x1234', '0x5678'] + const DEFAULT_STATE_ROOT = '0x1234' + + const appendAndGenerateBatch = async ( + batch: string[], + batchIndex: number = 0, + cumulativePrevElements: number = 0 + ): Promise => { + await stateChain.appendStateBatch(batch) + // Generate a local version of the rollup batch + const localBatch = new StateChainBatch( + batchIndex, + cumulativePrevElements, + batch + ) + await localBatch.generateTree() + return localBatch + } + + /* Link libraries before tests */ + before(async () => { + rollupMerkleUtils = await deployContract(wallet, RollupMerkleUtils, [], { + gasLimit: 6700000, + }) + }) + + /* Deploy a new RollupChain before each test */ + beforeEach(async () => { + stateChain = await deployContract( + wallet, + StateCommitmentChain, + [rollupMerkleUtils.address, canonicalTransactionChain.address], + { + gasLimit: 6700000, + } + ) + }) + + describe('appendStateBatch()', async () => { + it('should not throw when appending a batch from any wallet', async () => { + await stateChain.connect(randomWallet).appendStateBatch(DEFAULT_BATCH) + }) + + it('should throw if submitting an empty batch', async () => { + const emptyBatch = [] + await TestUtils.assertRevertsAsync( + 'Cannot submit an empty state commitment batch', + async () => { + await stateChain.appendStateBatch(emptyBatch) + } + ) + }) + + it('should add to batches array', async () => { + await stateChain.appendStateBatch(DEFAULT_BATCH) + const batchesLength = await stateChain.getBatchesLength() + batchesLength.toNumber().should.equal(1) + }) + + it('should update cumulativeNumElements correctly', async () => { + await stateChain.appendStateBatch(DEFAULT_BATCH) + const cumulativeNumElements = await stateChain.cumulativeNumElements.call() + cumulativeNumElements.toNumber().should.equal(DEFAULT_BATCH.length) + }) + + it('should calculate batchHeaderHash correctly', async () => { + const localBatch = await appendAndGenerateBatch(DEFAULT_BATCH) + const expectedBatchHeaderHash = await localBatch.hashBatchHeader() + const calculatedBatchHeaderHash = await stateChain.batches(0) + calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) + }) + + it('should add multiple batches correctly', async () => { + const numBatchs = 10 + for (let batchIndex = 0; batchIndex < numBatchs; batchIndex++) { + const cumulativePrevElements = DEFAULT_BATCH.length * batchIndex + const localBatch = await appendAndGenerateBatch( + DEFAULT_BATCH, + batchIndex, + cumulativePrevElements + ) + const expectedBatchHeaderHash = await localBatch.hashBatchHeader() + const calculatedBatchHeaderHash = await stateChain.batches(batchIndex) + calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) + } + const cumulativeNumElements = await stateChain.cumulativeNumElements.call() + cumulativeNumElements + .toNumber() + .should.equal(numBatchs * DEFAULT_BATCH.length) + const batchesLength = await stateChain.getBatchesLength() + batchesLength.toNumber().should.equal(numBatchs) + }) + }) +}) From a8c6021cd48ef48213cc3ffa27dc4765def66470 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Thu, 28 May 2020 19:23:09 -0400 Subject: [PATCH 32/37] add state chain verify element --- .../contracts/StateCommitmentChain.sol | 54 +++---- .../CanonicalTransactionChain.spec.ts | 2 +- .../rollup-list/StateCommitmentChain.spec.ts | 145 
+++++++++++++++++- 3 files changed, 168 insertions(+), 33 deletions(-) diff --git a/packages/rollup-contracts/contracts/StateCommitmentChain.sol b/packages/rollup-contracts/contracts/StateCommitmentChain.sol index c77ee3e5e0603..a9aa8cd351aa9 100644 --- a/packages/rollup-contracts/contracts/StateCommitmentChain.sol +++ b/packages/rollup-contracts/contracts/StateCommitmentChain.sol @@ -4,9 +4,10 @@ pragma experimental ABIEncoderV2; /* Internal Imports */ import {DataTypes as dt} from "./DataTypes.sol"; import {RollupMerkleUtils} from "./RollupMerkleUtils.sol"; +import {CanonicalTransactionChain} from "./CanonicalTransactionChain.sol"; contract StateCommitmentChain { - address public canonicalTransactionChain; + CanonicalTransactionChain canonicalTransactionChain; RollupMerkleUtils public merkleUtils; uint public cumulativeNumElements; bytes32[] public batches; @@ -16,7 +17,7 @@ contract StateCommitmentChain { address _canonicalTransactionChain ) public { merkleUtils = RollupMerkleUtils(_rollupMerkleUtilsAddress); - canonicalTransactionChain = _canonicalTransactionChain; + canonicalTransactionChain = CanonicalTransactionChain(_canonicalTransactionChain); } function getBatchesLength() public view returns (uint) { @@ -24,7 +25,7 @@ contract StateCommitmentChain { } function hashBatchHeader( - dt.TxChainBatchHeader memory _batchHeader + dt.StateChainBatchHeader memory _batchHeader ) public pure returns (bytes32) { return keccak256(abi.encodePacked( _batchHeader.elementsMerkleRoot, @@ -34,8 +35,9 @@ contract StateCommitmentChain { } function appendStateBatch(bytes[] memory _stateBatch) public { + require(cumulativeNumElements + _stateBatch.length <= canonicalTransactionChain.cumulativeNumElements(), + "Cannot append more state commitments than total number of transactions in CanonicalTransactionChain"); require(_stateBatch.length > 0, "Cannot submit an empty state commitment batch"); - // TODO Check that number of state commitments is less than or equal to num txs in canonical tx chain bytes32 batchHeaderHash = keccak256(abi.encodePacked( merkleUtils.getMerkleRoot(_stateBatch), // elementsMerkleRoot _stateBatch.length, // numElementsInBatch @@ -45,26 +47,26 @@ contract StateCommitmentChain { cumulativeNumElements += _stateBatch.length; } - // // verifies an element is in the current list at the given position - // function verifyElement( - // bytes memory _element, // the element of the list being proven - // uint _position, // the position in the list of the element being proven - // dt.StateElementInclusionProof memory _inclusionProof // inclusion proof in the rollup batch - // ) public view returns (bool) { - // // For convenience, store the batchHeader - // dt.StateChainBatchHeader memory batchHeader = _inclusionProof.batchHeader; - // // make sure absolute position equivalent to relative positions - // if(_position != _inclusionProof.indexInBatch + - // batchHeader.cumulativePrevElements) - // return false; - // // verify elementsMerkleRoot - // if (!merkleUtils.verify( - // batchHeader.elementsMerkleRoot, - // _element, - // _inclusionProof.indexInBatch, - // _inclusionProof.siblings - // )) return false; - // //compare computed batch header with the batch header in the list. 
- // return hashBatchHeader(batchHeader) == batches[_inclusionProof.batchIndex]; - // } + // verifies an element is in the current list at the given position + function verifyElement( + bytes memory _element, // the element of the list being proven + uint _position, // the position in the list of the element being proven + dt.StateElementInclusionProof memory _inclusionProof // inclusion proof in the rollup batch + ) public view returns (bool) { + // For convenience, store the batchHeader + dt.StateChainBatchHeader memory batchHeader = _inclusionProof.batchHeader; + // make sure absolute position equivalent to relative positions + if(_position != _inclusionProof.indexInBatch + + batchHeader.cumulativePrevElements) + return false; + // verify elementsMerkleRoot + if (!merkleUtils.verify( + batchHeader.elementsMerkleRoot, + _element, + _inclusionProof.indexInBatch, + _inclusionProof.siblings + )) return false; + //compare computed batch header with the batch header in the list. + return hashBatchHeader(batchHeader) == batches[_inclusionProof.batchIndex]; + } } diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index bb3d43bd82c6c..2edc1130e0d46 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -18,7 +18,7 @@ import * as SafetyTransactionQueue from '../../build/SafetyTransactionQueue.json import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' /* Begin tests */ -describe.only('CanonicalTransactionChain', () => { +describe('CanonicalTransactionChain', () => { const provider = createMockProvider() const [wallet, sequencer, l1ToL2TransactionPasser, randomWallet] = getWallets( provider diff --git a/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts b/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts index f63712ec6c851..858e1cd89c1da 100644 --- a/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts @@ -13,18 +13,33 @@ const log = getLogger('state-commitment-chain', true) /* Contract Imports */ import * as StateCommitmentChain from '../../build/StateCommitmentChain.json' -import * as L1ToL2TransactionQueue from '../../build/L1ToL2TransactionQueue.json' -import * as SafetyTransactionQueue from '../../build/SafetyTransactionQueue.json' +import * as CanonicalTransactionChain from '../../build/CanonicalTransactionChain.json' import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' /* Begin tests */ describe('StateCommitmentChain', () => { const provider = createMockProvider() - const [wallet, canonicalTransactionChain, randomWallet] = getWallets(provider) + const [wallet, sequencer, l1ToL2TransactionPasser, randomWallet] = getWallets( + provider + ) let stateChain + let canonicalTxChain let rollupMerkleUtils const DEFAULT_BATCH = ['0x1234', '0x5678'] + const DEFAULT_TX_BATCH = [ + '0x1234', + '0x5678', + '0x1234', + '0x5678', + '0x1234', + '0x5678', + '0x1234', + '0x5678', + '0x1234', + '0x5678', + ] const DEFAULT_STATE_ROOT = '0x1234' + const LIVENESS_ASSUMPTION = 600 const appendAndGenerateBatch = async ( batch: string[], @@ -42,11 +57,34 @@ describe('StateCommitmentChain', () => { return localBatch } - /* Link libraries before tests */ + const appendTxBatch = async (batch: string[]): Promise 
=> { + const timestamp = Math.floor(Date.now() / 1000) + // Submit the rollup batch on-chain + await canonicalTxChain + .connect(sequencer) + .appendTransactionBatch(batch, timestamp) + } + before(async () => { rollupMerkleUtils = await deployContract(wallet, RollupMerkleUtils, [], { gasLimit: 6700000, }) + + canonicalTxChain = await deployContract( + wallet, + CanonicalTransactionChain, + [ + rollupMerkleUtils.address, + sequencer.address, + l1ToL2TransactionPasser.address, + LIVENESS_ASSUMPTION, + ], + { + gasLimit: 6700000, + } + ) + // length 10 batch + await appendTxBatch(DEFAULT_TX_BATCH) }) /* Deploy a new RollupChain before each test */ @@ -54,7 +92,7 @@ describe('StateCommitmentChain', () => { stateChain = await deployContract( wallet, StateCommitmentChain, - [rollupMerkleUtils.address, canonicalTransactionChain.address], + [rollupMerkleUtils.address, canonicalTxChain.address], { gasLimit: 6700000, } @@ -96,7 +134,7 @@ describe('StateCommitmentChain', () => { }) it('should add multiple batches correctly', async () => { - const numBatchs = 10 + const numBatchs = 5 for (let batchIndex = 0; batchIndex < numBatchs; batchIndex++) { const cumulativePrevElements = DEFAULT_BATCH.length * batchIndex const localBatch = await appendAndGenerateBatch( @@ -115,5 +153,100 @@ describe('StateCommitmentChain', () => { const batchesLength = await stateChain.getBatchesLength() batchesLength.toNumber().should.equal(numBatchs) }) + + it('should throw if submitting more state commitments than number of txs in canonical tx chain', async () => { + const numBatchs = 5 + for (let i = 0; i < numBatchs; i++) { + await stateChain.appendStateBatch(DEFAULT_BATCH) + } + await TestUtils.assertRevertsAsync( + 'Cannot append more state commitments than total number of transactions in CanonicalTransactionChain', + async () => { + await stateChain.appendStateBatch(DEFAULT_BATCH) + } + ) + }) + }) + + describe('verifyElement() ', async () => { + it('should return true for valid elements for different batches and elements', async () => { + await appendTxBatch(DEFAULT_TX_BATCH) + await appendTxBatch(DEFAULT_TX_BATCH) + const numBatches = 3 + const batch = [ + '0x1234', + '0x4567', + '0x890a', + '0x4567', + '0x890a', + '0xabcd', + '0x1234', + ] + for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { + const cumulativePrevElements = batch.length * batchIndex + const localBatch = await appendAndGenerateBatch( + batch, + batchIndex, + cumulativePrevElements + ) + for ( + let elementIndex = 0; + elementIndex < batch.length; + elementIndex += 3 + ) { + const element = batch[elementIndex] + const position = localBatch.getPosition(elementIndex) + const elementInclusionProof = await localBatch.getElementInclusionProof( + elementIndex + ) + const isIncluded = await stateChain.verifyElement( + element, + position, + elementInclusionProof + ) + isIncluded.should.equal(true) + } + } + }) + + it('should return false for wrong position with wrong indexInBatch', async () => { + const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + const localBatch = await appendAndGenerateBatch(batch) + const elementIndex = 1 + const element = batch[elementIndex] + const position = localBatch.getPosition(elementIndex) + const elementInclusionProof = await localBatch.getElementInclusionProof( + elementIndex + ) + //Give wrong position so inclusion proof is wrong + const wrongPosition = position + 1 + const isIncluded = await stateChain.verifyElement( + element, + wrongPosition, + elementInclusionProof + ) + 
isIncluded.should.equal(false) + }) + + it('should return false for wrong position and matching indexInBatch', async () => { + const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] + const localBatch = await appendAndGenerateBatch(batch) + const elementIndex = 1 + const element = batch[elementIndex] + const position = localBatch.getPosition(elementIndex) + const elementInclusionProof = await localBatch.getElementInclusionProof( + elementIndex + ) + //Give wrong position so inclusion proof is wrong + const wrongPosition = position + 1 + //Change index to also be false (so position = index + cumulative) + elementInclusionProof.indexInBatch++ + const isIncluded = await stateChain.verifyElement( + element, + wrongPosition, + elementInclusionProof + ) + isIncluded.should.equal(false) + }) }) }) From df764328272354a302637476b752c0498cb63838 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Mon, 1 Jun 2020 16:49:56 -0400 Subject: [PATCH 33/37] add deleteAfterInclusive to state commitment chain --- .../contracts/StateCommitmentChain.sol | 17 ++- .../CanonicalTransactionChain.spec.ts | 8 +- .../rollup-list/StateCommitmentChain.spec.ts | 142 ++++++++++++++++-- 3 files changed, 152 insertions(+), 15 deletions(-) diff --git a/packages/rollup-contracts/contracts/StateCommitmentChain.sol b/packages/rollup-contracts/contracts/StateCommitmentChain.sol index a9aa8cd351aa9..a176a0b70c5c3 100644 --- a/packages/rollup-contracts/contracts/StateCommitmentChain.sol +++ b/packages/rollup-contracts/contracts/StateCommitmentChain.sol @@ -9,15 +9,18 @@ import {CanonicalTransactionChain} from "./CanonicalTransactionChain.sol"; contract StateCommitmentChain { CanonicalTransactionChain canonicalTransactionChain; RollupMerkleUtils public merkleUtils; + address public fraudVerifier; uint public cumulativeNumElements; bytes32[] public batches; constructor( address _rollupMerkleUtilsAddress, - address _canonicalTransactionChain + address _canonicalTransactionChain, + address _fraudVerifier ) public { merkleUtils = RollupMerkleUtils(_rollupMerkleUtilsAddress); canonicalTransactionChain = CanonicalTransactionChain(_canonicalTransactionChain); + fraudVerifier = _fraudVerifier; } function getBatchesLength() public view returns (uint) { @@ -69,4 +72,16 @@ contract StateCommitmentChain { //compare computed batch header with the batch header in the list. 
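        // Editor's note (descriptive comment on the surrounding verifyElement code):
        // by this point the proof has already passed the position check
        // (_position == indexInBatch + cumulativePrevElements) and the Merkle
        // inclusion check of _element under elementsMerkleRoot. The final step
        // recomputes the batch header hash from the proof's header and compares it
        // against the hash stored at batches[_inclusionProof.batchIndex]; the new
        // deleteAfterInclusive function added below reuses this same hashBatchHeader
        // comparison to validate the caller-supplied header before truncating the chain.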
return hashBatchHeader(batchHeader) == batches[_inclusionProof.batchIndex]; } + + function deleteAfterInclusive( + uint _batchIndex, + dt.StateChainBatchHeader memory _batchHeader + ) public { + require(msg.sender == fraudVerifier, "Only FraudVerifier has permission to delete state batches"); + require(_batchIndex < batches.length, "Cannot delete batches outside of valid range"); + bytes32 calculatedBatchHeaderHash = hashBatchHeader(_batchHeader); + require(calculatedBatchHeaderHash == batches[_batchIndex], "Calculated batch header is different than expected batch header"); + batches.length = _batchIndex; + cumulativeNumElements = _batchHeader.cumulativePrevElements; + } } diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index 2edc1130e0d46..a338a20cb49fb 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -221,8 +221,8 @@ describe('CanonicalTransactionChain', () => { }) it('should add multiple batches correctly', async () => { - const numBatchs = 10 - for (let batchIndex = 0; batchIndex < numBatchs; batchIndex++) { + const numBatches = 10 + for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { const cumulativePrevElements = DEFAULT_BATCH.length * batchIndex const localBatch = await appendAndGenerateBatch( DEFAULT_BATCH, @@ -238,9 +238,9 @@ describe('CanonicalTransactionChain', () => { const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() cumulativeNumElements .toNumber() - .should.equal(numBatchs * DEFAULT_BATCH.length) + .should.equal(numBatches * DEFAULT_BATCH.length) const batchesLength = await canonicalTxChain.getBatchesLength() - batchesLength.toNumber().should.equal(numBatchs) + batchesLength.toNumber().should.equal(numBatches) }) describe('when there is a batch in the L1toL2Queue', async () => { diff --git a/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts b/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts index 858e1cd89c1da..3b2903ef716df 100644 --- a/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts @@ -19,9 +19,13 @@ import * as RollupMerkleUtils from '../../build/RollupMerkleUtils.json' /* Begin tests */ describe('StateCommitmentChain', () => { const provider = createMockProvider() - const [wallet, sequencer, l1ToL2TransactionPasser, randomWallet] = getWallets( - provider - ) + const [ + wallet, + sequencer, + l1ToL2TransactionPasser, + fraudVerifier, + randomWallet, + ] = getWallets(provider) let stateChain let canonicalTxChain let rollupMerkleUtils @@ -92,7 +96,11 @@ describe('StateCommitmentChain', () => { stateChain = await deployContract( wallet, StateCommitmentChain, - [rollupMerkleUtils.address, canonicalTxChain.address], + [ + rollupMerkleUtils.address, + canonicalTxChain.address, + fraudVerifier.address, + ], { gasLimit: 6700000, } @@ -134,8 +142,8 @@ describe('StateCommitmentChain', () => { }) it('should add multiple batches correctly', async () => { - const numBatchs = 5 - for (let batchIndex = 0; batchIndex < numBatchs; batchIndex++) { + const numBatches = 5 + for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { const cumulativePrevElements = DEFAULT_BATCH.length * batchIndex const localBatch = await appendAndGenerateBatch( 
DEFAULT_BATCH, @@ -149,14 +157,14 @@ describe('StateCommitmentChain', () => { const cumulativeNumElements = await stateChain.cumulativeNumElements.call() cumulativeNumElements .toNumber() - .should.equal(numBatchs * DEFAULT_BATCH.length) + .should.equal(numBatches * DEFAULT_BATCH.length) const batchesLength = await stateChain.getBatchesLength() - batchesLength.toNumber().should.equal(numBatchs) + batchesLength.toNumber().should.equal(numBatches) }) it('should throw if submitting more state commitments than number of txs in canonical tx chain', async () => { - const numBatchs = 5 - for (let i = 0; i < numBatchs; i++) { + const numBatches = 5 + for (let i = 0; i < numBatches; i++) { await stateChain.appendStateBatch(DEFAULT_BATCH) } await TestUtils.assertRevertsAsync( @@ -249,4 +257,118 @@ describe('StateCommitmentChain', () => { isIncluded.should.equal(false) }) }) + + describe('deleteAfterInclusive() ', async () => { + it('should not allow deletion from non-fraudVerifier', async () => { + const cumulativePrevElements = 0 + const batchIndex = 0 + const localBatch = await appendAndGenerateBatch(DEFAULT_BATCH) + const batchHeader = { + elementsMerkleRoot: await localBatch.elementsMerkleTree.getRootHash(), + numElementsInBatch: DEFAULT_BATCH.length, + cumulativePrevElements, + } + await TestUtils.assertRevertsAsync( + 'Only FraudVerifier has permission to delete state batches', + async () => { + await stateChain.connect(randomWallet).deleteAfterInclusive( + batchIndex, // delete the single appended batch + batchHeader + ) + } + ) + }) + describe('when a single batch is deleted', async () => { + beforeEach(async () => { + const cumulativePrevElements = 0 + const batchIndex = 0 + const localBatch = await appendAndGenerateBatch(DEFAULT_BATCH) + const batchHeader = { + elementsMerkleRoot: await localBatch.elementsMerkleTree.getRootHash(), + numElementsInBatch: DEFAULT_BATCH.length, + cumulativePrevElements, + } + await stateChain.connect(fraudVerifier).deleteAfterInclusive( + batchIndex, // delete the single appended batch + batchHeader + ) + }) + + it('should successfully update the batches array', async () => { + const batchesLength = await stateChain.getBatchesLength() + batchesLength.should.equal(0) + }) + + it('should successfully append a batch after deletion', async () => { + const localBatch = await appendAndGenerateBatch(DEFAULT_BATCH) + const expectedBatchHeaderHash = await localBatch.hashBatchHeader() + const calculatedBatchHeaderHash = await stateChain.batches(0) + calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) + }) + }) + + it('should delete many batches', async () => { + const deleteBatchIndex = 0 + const localBatches = [] + for (let batchIndex = 0; batchIndex < 5; batchIndex++) { + const cumulativePrevElements = batchIndex * DEFAULT_BATCH.length + const localBatch = await appendAndGenerateBatch( + DEFAULT_BATCH, + batchIndex, + cumulativePrevElements + ) + localBatches.push(localBatch) + } + const deleteBatch = localBatches[deleteBatchIndex] + const batchHeader = { + elementsMerkleRoot: deleteBatch.elementsMerkleTree.getRootHash(), + numElementsInBatch: DEFAULT_BATCH.length, + cumulativePrevElements: deleteBatch.cumulativePrevElements, + } + await stateChain.connect(fraudVerifier).deleteAfterInclusive( + deleteBatchIndex, // delete all batches (including and after batch 0) + batchHeader + ) + const batchesLength = await stateChain.getBatchesLength() + batchesLength.should.equal(0) + }) + it('should fail if batchHeader is incorrect', async () => { + const 
cumulativePrevElements = 0 + const batchIndex = 0 + const localBatch = await appendAndGenerateBatch(DEFAULT_BATCH) + const batchHeader = { + elementsMerkleRoot: await localBatch.elementsMerkleTree.getRootHash(), + numElementsInBatch: DEFAULT_BATCH.length + 1, // increment to make header incorrect + cumulativePrevElements, + } + await TestUtils.assertRevertsAsync( + 'Calculated batch header is different than expected batch header', + async () => { + await stateChain.connect(fraudVerifier).deleteAfterInclusive( + batchIndex, // delete the single appended batch + batchHeader + ) + } + ) + }) + it('should fail if trying to delete a batch outside of valid range', async () => { + const cumulativePrevElements = 0 + const batchIndex = 1 // outside of range + const localBatch = await appendAndGenerateBatch(DEFAULT_BATCH) + const batchHeader = { + elementsMerkleRoot: await localBatch.elementsMerkleTree.getRootHash(), + numElementsInBatch: DEFAULT_BATCH.length + 1, // increment to make header incorrect + cumulativePrevElements, + } + await TestUtils.assertRevertsAsync( + 'Cannot delete batches outside of valid range', + async () => { + await stateChain.connect(fraudVerifier).deleteAfterInclusive( + batchIndex, // delete the single appended batch + batchHeader + ) + } + ) + }) + }) }) From 705c1540ee322e3503bb0e4bcf28079ce01496d0 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Mon, 1 Jun 2020 19:04:03 -0400 Subject: [PATCH 34/37] clean up unit tests --- .../contracts/CanonicalTransactionChain.sol | 2 +- .../rollup-contracts/contracts/DataTypes.sol | 15 ++-- .../contracts/StateCommitmentChain.sol | 5 +- .../CanonicalTransactionChain.spec.ts | 79 ++++++++++--------- .../test/rollup-list/RLhelper.ts | 6 +- .../test/rollup-list/RollupQueue.spec.ts | 3 - .../rollup-list/StateCommitmentChain.spec.ts | 69 ++++++++-------- 7 files changed, 87 insertions(+), 92 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index d29cfd49a8138..ca475879efd86 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -119,7 +119,7 @@ contract CanonicalTransactionChain { function verifyElement( bytes memory _element, // the element of the list being proven uint _position, // the position in the list of the element being proven - dt.ElementInclusionProof memory _inclusionProof // inclusion proof in the rollup batch + dt.TxElementInclusionProof memory _inclusionProof // inclusion proof in the rollup batch ) public view returns (bool) { // For convenience, store the batchHeader dt.TxChainBatchHeader memory batchHeader = _inclusionProof.batchHeader; diff --git a/packages/rollup-contracts/contracts/DataTypes.sol b/packages/rollup-contracts/contracts/DataTypes.sol index 6048508aa7e3a..b5bee2b071891 100644 --- a/packages/rollup-contracts/contracts/DataTypes.sol +++ b/packages/rollup-contracts/contracts/DataTypes.sol @@ -3,7 +3,6 @@ pragma experimental ABIEncoderV2; /** * @title DataTypes - * @notice TODO */ contract DataTypes { struct L2ToL1Message { @@ -11,18 +10,18 @@ contract DataTypes { bytes callData; } - struct ElementInclusionProof { - uint batchIndex; // index in batches array (first batch has batchNumber of 0) + struct TxElementInclusionProof { + uint batchIndex; TxChainBatchHeader batchHeader; - uint indexInBatch; // used to verify inclusion of the element in elementsMerkleRoot - bytes32[] siblings; // used to verify 
inclusion of the element in elementsMerkleRoot + uint indexInBatch; + bytes32[] siblings; } struct StateElementInclusionProof { - uint batchIndex; // index in batches array (first batch has batchNumber of 0) + uint batchIndex; StateChainBatchHeader batchHeader; - uint indexInBatch; // used to verify inclusion of the element in elementsMerkleRoot - bytes32[] siblings; // used to verify inclusion of the element in elementsMerkleRoot + uint indexInBatch; + bytes32[] siblings; } struct StateChainBatchHeader { diff --git a/packages/rollup-contracts/contracts/StateCommitmentChain.sol b/packages/rollup-contracts/contracts/StateCommitmentChain.sol index a176a0b70c5c3..d05e4172e910f 100644 --- a/packages/rollup-contracts/contracts/StateCommitmentChain.sol +++ b/packages/rollup-contracts/contracts/StateCommitmentChain.sol @@ -54,15 +54,12 @@ contract StateCommitmentChain { function verifyElement( bytes memory _element, // the element of the list being proven uint _position, // the position in the list of the element being proven - dt.StateElementInclusionProof memory _inclusionProof // inclusion proof in the rollup batch + dt.StateElementInclusionProof memory _inclusionProof ) public view returns (bool) { - // For convenience, store the batchHeader dt.StateChainBatchHeader memory batchHeader = _inclusionProof.batchHeader; - // make sure absolute position equivalent to relative positions if(_position != _inclusionProof.indexInBatch + batchHeader.cumulativePrevElements) return false; - // verify elementsMerkleRoot if (!merkleUtils.verify( batchHeader.elementsMerkleRoot, _element, diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index a338a20cb49fb..9befd274b97e0 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -6,7 +6,7 @@ import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' import { Contract } from 'ethers' /* Internal Imports */ -import { CanonicalTxBatch, TxQueueBatch } from './RLhelper' +import { TxChainBatch, TxQueueBatch } from './RLhelper' /* Logging */ const log = getLogger('canonical-tx-chain', true) @@ -27,7 +27,7 @@ describe('CanonicalTransactionChain', () => { let rollupMerkleUtils let l1ToL2Queue let safetyQueue - const LIVENESS_ASSUMPTION = 600 //600 seconds = 10 minutes + const FORCE_INCLUSION_PERIOD = 600 //600 seconds = 10 minutes const DEFAULT_BATCH = ['0x1234', '0x5678'] const DEFAULT_TX = '0x1234' @@ -44,10 +44,10 @@ describe('CanonicalTransactionChain', () => { batch: string[], batchIndex: number = 0, cumulativePrevElements: number = 0 - ): Promise => { + ): Promise => { const timestamp = await appendBatch(batch) // Generate a local version of the rollup batch - const localBatch = new CanonicalTxBatch( + const localBatch = new TxChainBatch( timestamp, false, batchIndex, @@ -104,7 +104,7 @@ describe('CanonicalTransactionChain', () => { rollupMerkleUtils.address, sequencer.address, l1ToL2TransactionPasser.address, - LIVENESS_ASSUMPTION, + FORCE_INCLUSION_PERIOD, ], { gasLimit: 6700000, @@ -143,7 +143,7 @@ describe('CanonicalTransactionChain', () => { it('should revert if submitting a batch older than the inclusion period', async () => { const timestamp = Math.floor(Date.now() / 1000) - const oldTimestamp = timestamp - (LIVENESS_ASSUMPTION + 1) + const oldTimestamp = timestamp - (FORCE_INCLUSION_PERIOD + 1) await 
TestUtils.assertRevertsAsync( 'Cannot submit a batch with a timestamp older than the sequencer inclusion period', async () => { @@ -156,7 +156,7 @@ describe('CanonicalTransactionChain', () => { it('should not revert if submitting a 5 minute old batch', async () => { const timestamp = Math.floor(Date.now() / 1000) - const oldTimestamp = timestamp - LIVENESS_ASSUMPTION / 2 + const oldTimestamp = timestamp - FORCE_INCLUSION_PERIOD / 2 await canonicalTxChain .connect(sequencer) .appendTransactionBatch(DEFAULT_BATCH, oldTimestamp) @@ -175,7 +175,7 @@ describe('CanonicalTransactionChain', () => { ) }) - it('should revert if submitting a new batch with a timestamp less than latest batch timestamp', async () => { + it('should revert if submitting a new batch with a timestamp older than last batch timestamp', async () => { const timestamp = await appendBatch(DEFAULT_BATCH) const oldTimestamp = timestamp - 1 await TestUtils.assertRevertsAsync( @@ -264,7 +264,7 @@ describe('CanonicalTransactionChain', () => { it('should revert when there is an older batch in the L1ToL2Queue', async () => { const snapshotID = await provider.send('evm_snapshot', []) - await provider.send('evm_increaseTime', [LIVENESS_ASSUMPTION]) + await provider.send('evm_increaseTime', [FORCE_INCLUSION_PERIOD]) const newTimestamp = localBatch.timestamp + 60 await TestUtils.assertRevertsAsync( 'Must process older L1ToL2Queue batches first to enforce timestamp monotonicity', @@ -299,7 +299,7 @@ describe('CanonicalTransactionChain', () => { it('should revert when there is an older batch in the SafetyQueue', async () => { const snapshotID = await provider.send('evm_snapshot', []) - await provider.send('evm_increaseTime', [LIVENESS_ASSUMPTION]) + await provider.send('evm_increaseTime', [FORCE_INCLUSION_PERIOD]) const newTimestamp = localBatch.timestamp + 60 await TestUtils.assertRevertsAsync( 'Must process older SafetyQueue batches first to enforce timestamp monotonicity', @@ -354,7 +354,7 @@ describe('CanonicalTransactionChain', () => { it('should successfully append a L1ToL2Batch', async () => { const { timestamp, txHash } = await l1ToL2Queue.batchHeaders(0) - const localBatch = new CanonicalTxBatch( + const localBatch = new TxChainBatch( timestamp, true, // isL1ToL2Tx 0, //batchIndex @@ -379,33 +379,35 @@ describe('CanonicalTransactionChain', () => { it('should allow non-sequencer to appendL1ToL2Batch after inclusion period has elapsed', async () => { const snapshotID = await provider.send('evm_snapshot', []) - await provider.send('evm_increaseTime', [LIVENESS_ASSUMPTION]) + await provider.send('evm_increaseTime', [FORCE_INCLUSION_PERIOD]) await canonicalTxChain.appendL1ToL2Batch() await provider.send('evm_revert', [snapshotID]) }) }) - it('should revert when there is an older batch in the SafetyQueue ', async () => { - const snapshotID = await provider.send('evm_snapshot', []) - await enqueueAndGenerateSafetyBatch(DEFAULT_TX) - await provider.send('evm_increaseTime', [10]) - await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) - await TestUtils.assertRevertsAsync( - 'Must process older SafetyQueue batches first to enforce timestamp monotonicity', - async () => { - await canonicalTxChain.appendL1ToL2Batch() - } - ) - await provider.send('evm_revert', [snapshotID]) - }) + describe('when there is a batch in both the SafetyQueue and L1toL2Queue', async () => { + it('should revert when the SafetyQueue batch is older', async () => { + const snapshotID = await provider.send('evm_snapshot', []) + await enqueueAndGenerateSafetyBatch(DEFAULT_TX) + 
await provider.send('evm_increaseTime', [10]) + await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) + await TestUtils.assertRevertsAsync( + 'Must process older SafetyQueue batches first to enforce timestamp monotonicity', + async () => { + await canonicalTxChain.appendL1ToL2Batch() + } + ) + await provider.send('evm_revert', [snapshotID]) + }) - it('should succeed when there are only newer batches in the SafetyQueue ', async () => { - const snapshotID = await provider.send('evm_snapshot', []) - await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) - await provider.send('evm_increaseTime', [10]) - await enqueueAndGenerateSafetyBatch(DEFAULT_TX) - await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() - await provider.send('evm_revert', [snapshotID]) + it('should succeed when the L1ToL2Queue batch is older', async () => { + const snapshotID = await provider.send('evm_snapshot', []) + await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) + await provider.send('evm_increaseTime', [10]) + await enqueueAndGenerateSafetyBatch(DEFAULT_TX) + await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() + await provider.send('evm_revert', [snapshotID]) + }) }) it('should revert when L1ToL2TxQueue is empty', async () => { @@ -437,7 +439,7 @@ describe('CanonicalTransactionChain', () => { it('should successfully append a SafetyBatch', async () => { const { timestamp, txHash } = await safetyQueue.batchHeaders(0) - const localBatch = new CanonicalTxBatch( + const localBatch = new TxChainBatch( timestamp, false, // isL1ToL2Tx 0, //batchIndex @@ -451,7 +453,7 @@ describe('CanonicalTransactionChain', () => { batchHeaderHash.should.equal(localBatchHeaderHash) }) - it('should not allow non-sequencer to appendSafetyBatch if less than 10 minutes old', async () => { + it('should not allow non-sequencer to appendSafetyBatch if less than force inclusion period', async () => { await TestUtils.assertRevertsAsync( 'Message sender does not have permission to append this batch', async () => { @@ -459,9 +461,10 @@ describe('CanonicalTransactionChain', () => { } ) }) - it('should allow non-sequencer to appendSafetyBatch after 10 minutes have elapsed', async () => { + + it('should allow non-sequencer to appendSafetyBatch after force inclusion period has elapsed', async () => { const snapshotID = await provider.send('evm_snapshot', []) - await provider.send('evm_increaseTime', [LIVENESS_ASSUMPTION]) + await provider.send('evm_increaseTime', [FORCE_INCLUSION_PERIOD]) await canonicalTxChain.appendSafetyBatch() await provider.send('evm_revert', [snapshotID]) }) @@ -542,7 +545,7 @@ describe('CanonicalTransactionChain', () => { it('should return true for valid element from a l1ToL2Batch', async () => { const l1ToL2Batch = await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) await canonicalTxChain.connect(sequencer).appendL1ToL2Batch() - const localBatch = new CanonicalTxBatch( + const localBatch = new TxChainBatch( l1ToL2Batch.timestamp, //timestamp true, //isL1ToL2Tx 0, //batchIndex @@ -566,7 +569,7 @@ describe('CanonicalTransactionChain', () => { it('should return true for valid element from a SafetyBatch', async () => { const safetyBatch = await enqueueAndGenerateSafetyBatch(DEFAULT_TX) await canonicalTxChain.connect(sequencer).appendSafetyBatch() - const localBatch = new CanonicalTxBatch( + const localBatch = new TxChainBatch( safetyBatch.timestamp, //timestamp false, //isL1ToL2Tx 0, //batchIndex diff --git a/packages/rollup-contracts/test/rollup-list/RLhelper.ts b/packages/rollup-contracts/test/rollup-list/RLhelper.ts index 
8355f169a7f4e..0c619d5a72e04 100644 --- a/packages/rollup-contracts/test/rollup-list/RLhelper.ts +++ b/packages/rollup-contracts/test/rollup-list/RLhelper.ts @@ -94,10 +94,6 @@ export class ChainBatch { return siblings } - public async hashBatchHeader(): Promise { - return '' - } - /* * elementIndex is the index in this batch of the element * that we want to create an inclusion proof for. @@ -124,7 +120,7 @@ export class ChainBatch { * Rollup batch. This includes all of the transactions in readable form * as well as the merkle tree which it generates. */ -export class CanonicalTxBatch extends ChainBatch { +export class TxChainBatch extends ChainBatch { public timestamp: number public isL1ToL2Tx: boolean diff --git a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts index 11ea6821acbd7..13f33aedad7ef 100644 --- a/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/RollupQueue.spec.ts @@ -50,9 +50,6 @@ describe('RollupQueue', () => { } describe('enqueueTx() ', async () => { - it('should not throw as long as it gets a bytes array (even if its invalid)', async () => { - await rollupQueue.enqueueTx(DEFAULT_TX) - }) it('should add to batchHeaders array', async () => { await rollupQueue.enqueueTx(DEFAULT_TX) const batchesLength = await rollupQueue.getBatchHeadersLength() diff --git a/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts b/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts index 3b2903ef716df..7eb08caa72b22 100644 --- a/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts @@ -29,7 +29,7 @@ describe('StateCommitmentChain', () => { let stateChain let canonicalTxChain let rollupMerkleUtils - const DEFAULT_BATCH = ['0x1234', '0x5678'] + const DEFAULT_STATE_BATCH = ['0x1234', '0x5678'] const DEFAULT_TX_BATCH = [ '0x1234', '0x5678', @@ -43,7 +43,7 @@ describe('StateCommitmentChain', () => { '0x5678', ] const DEFAULT_STATE_ROOT = '0x1234' - const LIVENESS_ASSUMPTION = 600 + const FORCE_INCLUSION_PERIOD = 600 const appendAndGenerateBatch = async ( batch: string[], @@ -81,7 +81,7 @@ describe('StateCommitmentChain', () => { rollupMerkleUtils.address, sequencer.address, l1ToL2TransactionPasser.address, - LIVENESS_ASSUMPTION, + FORCE_INCLUSION_PERIOD, ], { gasLimit: 6700000, @@ -108,8 +108,10 @@ describe('StateCommitmentChain', () => { }) describe('appendStateBatch()', async () => { - it('should not throw when appending a batch from any wallet', async () => { - await stateChain.connect(randomWallet).appendStateBatch(DEFAULT_BATCH) + it('should allow appending of state batches from any wallet', async () => { + await stateChain + .connect(randomWallet) + .appendStateBatch(DEFAULT_STATE_BATCH) }) it('should throw if submitting an empty batch', async () => { @@ -123,19 +125,19 @@ describe('StateCommitmentChain', () => { }) it('should add to batches array', async () => { - await stateChain.appendStateBatch(DEFAULT_BATCH) + await stateChain.appendStateBatch(DEFAULT_STATE_BATCH) const batchesLength = await stateChain.getBatchesLength() batchesLength.toNumber().should.equal(1) }) it('should update cumulativeNumElements correctly', async () => { - await stateChain.appendStateBatch(DEFAULT_BATCH) + await stateChain.appendStateBatch(DEFAULT_STATE_BATCH) const cumulativeNumElements = await stateChain.cumulativeNumElements.call() - 
cumulativeNumElements.toNumber().should.equal(DEFAULT_BATCH.length) + cumulativeNumElements.toNumber().should.equal(DEFAULT_STATE_BATCH.length) }) it('should calculate batchHeaderHash correctly', async () => { - const localBatch = await appendAndGenerateBatch(DEFAULT_BATCH) + const localBatch = await appendAndGenerateBatch(DEFAULT_STATE_BATCH) const expectedBatchHeaderHash = await localBatch.hashBatchHeader() const calculatedBatchHeaderHash = await stateChain.batches(0) calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) @@ -144,9 +146,9 @@ describe('StateCommitmentChain', () => { it('should add multiple batches correctly', async () => { const numBatches = 5 for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { - const cumulativePrevElements = DEFAULT_BATCH.length * batchIndex + const cumulativePrevElements = DEFAULT_STATE_BATCH.length * batchIndex const localBatch = await appendAndGenerateBatch( - DEFAULT_BATCH, + DEFAULT_STATE_BATCH, batchIndex, cumulativePrevElements ) @@ -157,7 +159,7 @@ describe('StateCommitmentChain', () => { const cumulativeNumElements = await stateChain.cumulativeNumElements.call() cumulativeNumElements .toNumber() - .should.equal(numBatches * DEFAULT_BATCH.length) + .should.equal(numBatches * DEFAULT_STATE_BATCH.length) const batchesLength = await stateChain.getBatchesLength() batchesLength.toNumber().should.equal(numBatches) }) @@ -165,12 +167,12 @@ describe('StateCommitmentChain', () => { it('should throw if submitting more state commitments than number of txs in canonical tx chain', async () => { const numBatches = 5 for (let i = 0; i < numBatches; i++) { - await stateChain.appendStateBatch(DEFAULT_BATCH) + await stateChain.appendStateBatch(DEFAULT_STATE_BATCH) } await TestUtils.assertRevertsAsync( 'Cannot append more state commitments than total number of transactions in CanonicalTransactionChain', async () => { - await stateChain.appendStateBatch(DEFAULT_BATCH) + await stateChain.appendStateBatch(DEFAULT_STATE_BATCH) } ) }) @@ -259,13 +261,13 @@ describe('StateCommitmentChain', () => { }) describe('deleteAfterInclusive() ', async () => { - it('should not allow deletion from non-fraudVerifier', async () => { + it('should not allow deletion from address other than fraud verifier', async () => { const cumulativePrevElements = 0 const batchIndex = 0 - const localBatch = await appendAndGenerateBatch(DEFAULT_BATCH) + const localBatch = await appendAndGenerateBatch(DEFAULT_STATE_BATCH) const batchHeader = { elementsMerkleRoot: await localBatch.elementsMerkleTree.getRootHash(), - numElementsInBatch: DEFAULT_BATCH.length, + numElementsInBatch: DEFAULT_STATE_BATCH.length, cumulativePrevElements, } await TestUtils.assertRevertsAsync( @@ -282,10 +284,10 @@ describe('StateCommitmentChain', () => { beforeEach(async () => { const cumulativePrevElements = 0 const batchIndex = 0 - const localBatch = await appendAndGenerateBatch(DEFAULT_BATCH) + const localBatch = await appendAndGenerateBatch(DEFAULT_STATE_BATCH) const batchHeader = { elementsMerkleRoot: await localBatch.elementsMerkleTree.getRootHash(), - numElementsInBatch: DEFAULT_BATCH.length, + numElementsInBatch: DEFAULT_STATE_BATCH.length, cumulativePrevElements, } await stateChain.connect(fraudVerifier).deleteAfterInclusive( @@ -300,7 +302,7 @@ describe('StateCommitmentChain', () => { }) it('should successfully append a batch after deletion', async () => { - const localBatch = await appendAndGenerateBatch(DEFAULT_BATCH) + const localBatch = await appendAndGenerateBatch(DEFAULT_STATE_BATCH) const 
expectedBatchHeaderHash = await localBatch.hashBatchHeader() const calculatedBatchHeaderHash = await stateChain.batches(0) calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) @@ -311,9 +313,9 @@ describe('StateCommitmentChain', () => { const deleteBatchIndex = 0 const localBatches = [] for (let batchIndex = 0; batchIndex < 5; batchIndex++) { - const cumulativePrevElements = batchIndex * DEFAULT_BATCH.length + const cumulativePrevElements = batchIndex * DEFAULT_STATE_BATCH.length const localBatch = await appendAndGenerateBatch( - DEFAULT_BATCH, + DEFAULT_STATE_BATCH, batchIndex, cumulativePrevElements ) @@ -322,7 +324,7 @@ describe('StateCommitmentChain', () => { const deleteBatch = localBatches[deleteBatchIndex] const batchHeader = { elementsMerkleRoot: deleteBatch.elementsMerkleTree.getRootHash(), - numElementsInBatch: DEFAULT_BATCH.length, + numElementsInBatch: DEFAULT_STATE_BATCH.length, cumulativePrevElements: deleteBatch.cumulativePrevElements, } await stateChain.connect(fraudVerifier).deleteAfterInclusive( @@ -332,13 +334,14 @@ describe('StateCommitmentChain', () => { const batchesLength = await stateChain.getBatchesLength() batchesLength.should.equal(0) }) - it('should fail if batchHeader is incorrect', async () => { + + it('should revert if batchHeader is incorrect', async () => { const cumulativePrevElements = 0 const batchIndex = 0 - const localBatch = await appendAndGenerateBatch(DEFAULT_BATCH) + const localBatch = await appendAndGenerateBatch(DEFAULT_STATE_BATCH) const batchHeader = { elementsMerkleRoot: await localBatch.elementsMerkleTree.getRootHash(), - numElementsInBatch: DEFAULT_BATCH.length + 1, // increment to make header incorrect + numElementsInBatch: DEFAULT_STATE_BATCH.length + 1, // increment to make header incorrect cumulativePrevElements, } await TestUtils.assertRevertsAsync( @@ -351,22 +354,22 @@ describe('StateCommitmentChain', () => { } ) }) - it('should fail if trying to delete a batch outside of valid range', async () => { + + it('should revert if trying to delete a batch outside of valid range', async () => { const cumulativePrevElements = 0 const batchIndex = 1 // outside of range - const localBatch = await appendAndGenerateBatch(DEFAULT_BATCH) + const localBatch = await appendAndGenerateBatch(DEFAULT_STATE_BATCH) const batchHeader = { elementsMerkleRoot: await localBatch.elementsMerkleTree.getRootHash(), - numElementsInBatch: DEFAULT_BATCH.length + 1, // increment to make header incorrect + numElementsInBatch: DEFAULT_STATE_BATCH.length + 1, // increment to make header incorrect cumulativePrevElements, } await TestUtils.assertRevertsAsync( 'Cannot delete batches outside of valid range', async () => { - await stateChain.connect(fraudVerifier).deleteAfterInclusive( - batchIndex, // delete the single appended batch - batchHeader - ) + await stateChain + .connect(fraudVerifier) + .deleteAfterInclusive(batchIndex, batchHeader) } ) }) From 576c035586f50313bbd6a898ad307d6b2487faaf Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Wed, 3 Jun 2020 10:33:53 -0400 Subject: [PATCH 35/37] renaming functions, add more timestamp unit tests --- .../contracts/CanonicalTransactionChain.sol | 2 +- .../CanonicalTransactionChain.spec.ts | 161 +++++++++++++----- .../rollup-list/StateCommitmentChain.spec.ts | 31 ++-- 3 files changed, 142 insertions(+), 52 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index ca475879efd86..a7006bde45754 100644 --- 
a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -91,7 +91,7 @@ contract CanonicalTransactionChain { cumulativeNumElements += numElementsInBatch; } - function appendTransactionBatch(bytes[] memory _txBatch, uint _timestamp) public { + function appendSequencerBatch(bytes[] memory _txBatch, uint _timestamp) public { require(authenticateAppend(msg.sender), "Message sender does not have permission to append a batch"); require(_txBatch.length > 0, "Cannot submit an empty batch"); require(_timestamp + forceInclusionPeriod > now, "Cannot submit a batch with a timestamp older than the sequencer inclusion period"); diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index 9befd274b97e0..aa65f106cb483 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -31,21 +31,21 @@ describe('CanonicalTransactionChain', () => { const DEFAULT_BATCH = ['0x1234', '0x5678'] const DEFAULT_TX = '0x1234' - const appendBatch = async (batch: string[]): Promise => { + const appendSequencerBatch = async (batch: string[]): Promise => { const timestamp = Math.floor(Date.now() / 1000) // Submit the rollup batch on-chain await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(batch, timestamp) + .appendSequencerBatch(batch, timestamp) return timestamp } - const appendAndGenerateBatch = async ( + const appendAndGenerateSequencerBatch = async ( batch: string[], batchIndex: number = 0, cumulativePrevElements: number = 0 ): Promise => { - const timestamp = await appendBatch(batch) + const timestamp = await appendSequencerBatch(batch) // Generate a local version of the rollup batch const localBatch = new TxChainBatch( timestamp, @@ -126,9 +126,9 @@ describe('CanonicalTransactionChain', () => { ) }) - describe('appendTransactionBatch()', async () => { + describe('appendSequencerBatch()', async () => { it('should not throw when appending a batch from the sequencer', async () => { - await appendBatch(DEFAULT_BATCH) + await appendSequencerBatch(DEFAULT_BATCH) }) it('should throw if submitting an empty batch', async () => { @@ -136,7 +136,7 @@ describe('CanonicalTransactionChain', () => { await TestUtils.assertRevertsAsync( 'Cannot submit an empty batch', async () => { - await appendBatch(emptyBatch) + await appendSequencerBatch(emptyBatch) } ) }) @@ -149,7 +149,7 @@ describe('CanonicalTransactionChain', () => { async () => { await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(DEFAULT_BATCH, oldTimestamp) + .appendSequencerBatch(DEFAULT_BATCH, oldTimestamp) } ) }) @@ -159,7 +159,7 @@ describe('CanonicalTransactionChain', () => { const oldTimestamp = timestamp - FORCE_INCLUSION_PERIOD / 2 await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(DEFAULT_BATCH, oldTimestamp) + .appendSequencerBatch(DEFAULT_BATCH, oldTimestamp) }) it('should revert if submitting a batch with a future timestamp', async () => { @@ -170,51 +170,48 @@ describe('CanonicalTransactionChain', () => { async () => { await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(DEFAULT_BATCH, futureTimestamp) + .appendSequencerBatch(DEFAULT_BATCH, futureTimestamp) } ) }) it('should revert if submitting a new batch with a timestamp older than last batch timestamp', async () => { - const 
timestamp = await appendBatch(DEFAULT_BATCH) + const timestamp = await appendSequencerBatch(DEFAULT_BATCH) const oldTimestamp = timestamp - 1 await TestUtils.assertRevertsAsync( 'Timestamps must monotonically increase', async () => { await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(DEFAULT_BATCH, oldTimestamp) + .appendSequencerBatch(DEFAULT_BATCH, oldTimestamp) } ) }) it('should add to batches array', async () => { - await appendBatch(DEFAULT_BATCH) + await appendSequencerBatch(DEFAULT_BATCH) const batchesLength = await canonicalTxChain.getBatchesLength() batchesLength.toNumber().should.equal(1) }) it('should update cumulativeNumElements correctly', async () => { - await appendBatch(DEFAULT_BATCH) + await appendSequencerBatch(DEFAULT_BATCH) const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() cumulativeNumElements.toNumber().should.equal(DEFAULT_BATCH.length) }) - it('should not allow appendTransactionBatch from non-sequencer', async () => { + it('should not allow appendSequencerBatch from non-sequencer', async () => { const timestamp = Math.floor(Date.now() / 1000) await TestUtils.assertRevertsAsync( 'Message sender does not have permission to append a batch', async () => { - await canonicalTxChain.appendTransactionBatch( - DEFAULT_BATCH, - timestamp - ) + await canonicalTxChain.appendSequencerBatch(DEFAULT_BATCH, timestamp) } ) }) it('should calculate batchHeaderHash correctly', async () => { - const localBatch = await appendAndGenerateBatch(DEFAULT_BATCH) + const localBatch = await appendAndGenerateSequencerBatch(DEFAULT_BATCH) const expectedBatchHeaderHash = await localBatch.hashBatchHeader() const calculatedBatchHeaderHash = await canonicalTxChain.batches(0) calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) @@ -224,7 +221,7 @@ describe('CanonicalTransactionChain', () => { const numBatches = 10 for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { const cumulativePrevElements = DEFAULT_BATCH.length * batchIndex - const localBatch = await appendAndGenerateBatch( + const localBatch = await appendAndGenerateSequencerBatch( DEFAULT_BATCH, batchIndex, cumulativePrevElements @@ -253,13 +250,13 @@ describe('CanonicalTransactionChain', () => { const oldTimestamp = localBatch.timestamp - 1 await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(DEFAULT_BATCH, oldTimestamp) + .appendSequencerBatch(DEFAULT_BATCH, oldTimestamp) }) it('should succesfully append a batch with an equal timestamp', async () => { await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(DEFAULT_BATCH, localBatch.timestamp) + .appendSequencerBatch(DEFAULT_BATCH, localBatch.timestamp) }) it('should revert when there is an older batch in the L1ToL2Queue', async () => { @@ -271,7 +268,7 @@ describe('CanonicalTransactionChain', () => { async () => { await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(DEFAULT_BATCH, newTimestamp) + .appendSequencerBatch(DEFAULT_BATCH, newTimestamp) } ) await provider.send('evm_revert', [snapshotID]) @@ -288,13 +285,13 @@ describe('CanonicalTransactionChain', () => { const oldTimestamp = localBatch.timestamp - 1 await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(DEFAULT_BATCH, oldTimestamp) + .appendSequencerBatch(DEFAULT_BATCH, oldTimestamp) }) it('should succesfully append a batch with an equal timestamp', async () => { await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(DEFAULT_BATCH, localBatch.timestamp) + 
.appendSequencerBatch(DEFAULT_BATCH, localBatch.timestamp) }) it('should revert when there is an older batch in the SafetyQueue', async () => { @@ -306,31 +303,119 @@ describe('CanonicalTransactionChain', () => { async () => { await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(DEFAULT_BATCH, newTimestamp) + .appendSequencerBatch(DEFAULT_BATCH, newTimestamp) } ) await provider.send('evm_revert', [snapshotID]) }) }) - describe('when both the safetyQueue and l1ToL2Queue are not empty', async () => { - let timestamp + describe('when there is an old batch in the safetyQueue and a recent batch in the l1ToL2Queue', async () => { + let safetyTimestamp + let l1ToL2Timestamp + let snapshotID beforeEach(async () => { - const localBatch = await enqueueAndGenerateSafetyBatch(DEFAULT_TX) - timestamp = localBatch.timestamp - await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) + const localSafetyBatch = await enqueueAndGenerateSafetyBatch(DEFAULT_TX) + safetyTimestamp = localSafetyBatch.timestamp + snapshotID = await provider.send('evm_snapshot', []) + await provider.send('evm_increaseTime', [FORCE_INCLUSION_PERIOD / 2]) + const localL1ToL2Batch = await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) + l1ToL2Timestamp = localL1ToL2Batch.timestamp + }) + afterEach(async () => { + await provider.send('evm_revert', [snapshotID]) }) it('should succesfully append a batch with an older timestamp than the oldest batch', async () => { - const oldTimestamp = timestamp - 1 + const oldTimestamp = safetyTimestamp - 1 await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(DEFAULT_BATCH, oldTimestamp) + .appendSequencerBatch(DEFAULT_BATCH, oldTimestamp) }) it('should succesfully append a batch with a timestamp equal to the oldest batch', async () => { await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(DEFAULT_BATCH, timestamp) + .appendSequencerBatch(DEFAULT_BATCH, safetyTimestamp) + }) + + it('should revert when appending a batch with a timestamp in between the two batches', async () => { + const middleTimestamp = safetyTimestamp + 1 + await TestUtils.assertRevertsAsync( + 'Must process older SafetyQueue batches first to enforce timestamp monotonicity', + async () => { + await canonicalTxChain + .connect(sequencer) + .appendSequencerBatch(DEFAULT_BATCH, middleTimestamp) + } + ) + }) + + it('should revert when appending a batch with a timestamp newer than both batches', async () => { + await provider.send('evm_increaseTime', [FORCE_INCLUSION_PERIOD / 10]) // increase time by 60 seconds + const oldTimestamp = l1ToL2Timestamp + 1 + await TestUtils.assertRevertsAsync( + 'Must process older L1ToL2Queue batches first to enforce timestamp monotonicity', + async () => { + await canonicalTxChain + .connect(sequencer) + .appendSequencerBatch(DEFAULT_BATCH, oldTimestamp) + } + ) + }) + }) + + describe('when there is an old batch in the l1ToL2Queue and a recent batch in the safetyQueue', async () => { + let l1ToL2Timestamp + let safetyTimestamp + let snapshotID + beforeEach(async () => { + const localL1ToL2Batch = await enqueueAndGenerateL1ToL2Batch(DEFAULT_TX) + l1ToL2Timestamp = localL1ToL2Batch.timestamp + snapshotID = await provider.send('evm_snapshot', []) + await provider.send('evm_increaseTime', [FORCE_INCLUSION_PERIOD / 2]) + const localSafetyBatch = await enqueueAndGenerateSafetyBatch(DEFAULT_TX) + safetyTimestamp = localSafetyBatch.timestamp + }) + afterEach(async () => { + await provider.send('evm_revert', [snapshotID]) + }) + + it('should succesfully append a batch with an older 
timestamp than both batches', async () => { + const oldTimestamp = l1ToL2Timestamp - 1 + await canonicalTxChain + .connect(sequencer) + .appendSequencerBatch(DEFAULT_BATCH, oldTimestamp) + }) + + it('should succesfully append a batch with a timestamp equal to the older batch', async () => { + await canonicalTxChain + .connect(sequencer) + .appendSequencerBatch(DEFAULT_BATCH, l1ToL2Timestamp) + }) + + it('should revert when appending a batch with a timestamp in between the two batches', async () => { + const middleTimestamp = l1ToL2Timestamp + 1 + await TestUtils.assertRevertsAsync( + 'Must process older L1ToL2Queue batches first to enforce timestamp monotonicity', + async () => { + await canonicalTxChain + .connect(sequencer) + .appendSequencerBatch(DEFAULT_BATCH, middleTimestamp) + } + ) + }) + + it('should revert when appending a batch with a timestamp newer than both batches', async () => { + await provider.send('evm_increaseTime', [FORCE_INCLUSION_PERIOD / 10]) // increase time by 60 seconds + const newTimestamp = safetyTimestamp + 1 + await TestUtils.assertRevertsAsync( + 'Must process older L1ToL2Queue batches first to enforce timestamp monotonicity', + async () => { + await canonicalTxChain + .connect(sequencer) + .appendSequencerBatch(DEFAULT_BATCH, newTimestamp) + } + ) }) }) }) @@ -517,7 +602,7 @@ describe('CanonicalTransactionChain', () => { ] for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { const cumulativePrevElements = batch.length * batchIndex - const localBatch = await appendAndGenerateBatch( + const localBatch = await appendAndGenerateSequencerBatch( batch, batchIndex, cumulativePrevElements @@ -592,7 +677,7 @@ describe('CanonicalTransactionChain', () => { it('should return false for wrong position with wrong indexInBatch', async () => { const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - const localBatch = await appendAndGenerateBatch(batch) + const localBatch = await appendAndGenerateSequencerBatch(batch) const elementIndex = 1 const element = batch[elementIndex] const position = localBatch.getPosition(elementIndex) @@ -611,7 +696,7 @@ describe('CanonicalTransactionChain', () => { it('should return false for wrong position and matching indexInBatch', async () => { const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - const localBatch = await appendAndGenerateBatch(batch) + const localBatch = await appendAndGenerateSequencerBatch(batch) const elementIndex = 1 const element = batch[elementIndex] const position = localBatch.getPosition(elementIndex) diff --git a/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts b/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts index 7eb08caa72b22..b0d3abd3fb270 100644 --- a/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts @@ -45,7 +45,7 @@ describe('StateCommitmentChain', () => { const DEFAULT_STATE_ROOT = '0x1234' const FORCE_INCLUSION_PERIOD = 600 - const appendAndGenerateBatch = async ( + const appendAndGenerateStateBatch = async ( batch: string[], batchIndex: number = 0, cumulativePrevElements: number = 0 @@ -66,7 +66,7 @@ describe('StateCommitmentChain', () => { // Submit the rollup batch on-chain await canonicalTxChain .connect(sequencer) - .appendTransactionBatch(batch, timestamp) + .appendSequencerBatch(batch, timestamp) } before(async () => { @@ -137,7 +137,7 @@ describe('StateCommitmentChain', () => { }) it('should 
calculate batchHeaderHash correctly', async () => { - const localBatch = await appendAndGenerateBatch(DEFAULT_STATE_BATCH) + const localBatch = await appendAndGenerateStateBatch(DEFAULT_STATE_BATCH) const expectedBatchHeaderHash = await localBatch.hashBatchHeader() const calculatedBatchHeaderHash = await stateChain.batches(0) calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) @@ -147,7 +147,7 @@ describe('StateCommitmentChain', () => { const numBatches = 5 for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { const cumulativePrevElements = DEFAULT_STATE_BATCH.length * batchIndex - const localBatch = await appendAndGenerateBatch( + const localBatch = await appendAndGenerateStateBatch( DEFAULT_STATE_BATCH, batchIndex, cumulativePrevElements @@ -180,6 +180,7 @@ describe('StateCommitmentChain', () => { describe('verifyElement() ', async () => { it('should return true for valid elements for different batches and elements', async () => { + // add enough transaction batches so # txs > # state roots await appendTxBatch(DEFAULT_TX_BATCH) await appendTxBatch(DEFAULT_TX_BATCH) const numBatches = 3 @@ -194,7 +195,7 @@ describe('StateCommitmentChain', () => { ] for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { const cumulativePrevElements = batch.length * batchIndex - const localBatch = await appendAndGenerateBatch( + const localBatch = await appendAndGenerateStateBatch( batch, batchIndex, cumulativePrevElements @@ -221,7 +222,7 @@ describe('StateCommitmentChain', () => { it('should return false for wrong position with wrong indexInBatch', async () => { const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - const localBatch = await appendAndGenerateBatch(batch) + const localBatch = await appendAndGenerateStateBatch(batch) const elementIndex = 1 const element = batch[elementIndex] const position = localBatch.getPosition(elementIndex) @@ -240,7 +241,7 @@ describe('StateCommitmentChain', () => { it('should return false for wrong position and matching indexInBatch', async () => { const batch = ['0x1234', '0x4567', '0x890a', '0x4567', '0x890a', '0xabcd'] - const localBatch = await appendAndGenerateBatch(batch) + const localBatch = await appendAndGenerateStateBatch(batch) const elementIndex = 1 const element = batch[elementIndex] const position = localBatch.getPosition(elementIndex) @@ -264,7 +265,7 @@ describe('StateCommitmentChain', () => { it('should not allow deletion from address other than fraud verifier', async () => { const cumulativePrevElements = 0 const batchIndex = 0 - const localBatch = await appendAndGenerateBatch(DEFAULT_STATE_BATCH) + const localBatch = await appendAndGenerateStateBatch(DEFAULT_STATE_BATCH) const batchHeader = { elementsMerkleRoot: await localBatch.elementsMerkleTree.getRootHash(), numElementsInBatch: DEFAULT_STATE_BATCH.length, @@ -284,7 +285,9 @@ describe('StateCommitmentChain', () => { beforeEach(async () => { const cumulativePrevElements = 0 const batchIndex = 0 - const localBatch = await appendAndGenerateBatch(DEFAULT_STATE_BATCH) + const localBatch = await appendAndGenerateStateBatch( + DEFAULT_STATE_BATCH + ) const batchHeader = { elementsMerkleRoot: await localBatch.elementsMerkleTree.getRootHash(), numElementsInBatch: DEFAULT_STATE_BATCH.length, @@ -302,7 +305,9 @@ describe('StateCommitmentChain', () => { }) it('should successfully append a batch after deletion', async () => { - const localBatch = await appendAndGenerateBatch(DEFAULT_STATE_BATCH) + const localBatch = await appendAndGenerateStateBatch( + 
DEFAULT_STATE_BATCH + ) const expectedBatchHeaderHash = await localBatch.hashBatchHeader() const calculatedBatchHeaderHash = await stateChain.batches(0) calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) @@ -314,7 +319,7 @@ describe('StateCommitmentChain', () => { const localBatches = [] for (let batchIndex = 0; batchIndex < 5; batchIndex++) { const cumulativePrevElements = batchIndex * DEFAULT_STATE_BATCH.length - const localBatch = await appendAndGenerateBatch( + const localBatch = await appendAndGenerateStateBatch( DEFAULT_STATE_BATCH, batchIndex, cumulativePrevElements @@ -338,7 +343,7 @@ describe('StateCommitmentChain', () => { it('should revert if batchHeader is incorrect', async () => { const cumulativePrevElements = 0 const batchIndex = 0 - const localBatch = await appendAndGenerateBatch(DEFAULT_STATE_BATCH) + const localBatch = await appendAndGenerateStateBatch(DEFAULT_STATE_BATCH) const batchHeader = { elementsMerkleRoot: await localBatch.elementsMerkleTree.getRootHash(), numElementsInBatch: DEFAULT_STATE_BATCH.length + 1, // increment to make header incorrect @@ -358,7 +363,7 @@ describe('StateCommitmentChain', () => { it('should revert if trying to delete a batch outside of valid range', async () => { const cumulativePrevElements = 0 const batchIndex = 1 // outside of range - const localBatch = await appendAndGenerateBatch(DEFAULT_STATE_BATCH) + const localBatch = await appendAndGenerateStateBatch(DEFAULT_STATE_BATCH) const batchHeader = { elementsMerkleRoot: await localBatch.elementsMerkleTree.getRootHash(), numElementsInBatch: DEFAULT_STATE_BATCH.length + 1, // increment to make header incorrect From a020a1c6f4e09e523e7b0a35c1d6c44429f79532 Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Thu, 4 Jun 2020 10:10:15 -0400 Subject: [PATCH 36/37] fix if-then-require cases --- .../contracts/CanonicalTransactionChain.sol | 34 +++++++++++-------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol index a7006bde45754..23648f53b098f 100644 --- a/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol +++ b/packages/rollup-contracts/contracts/CanonicalTransactionChain.sol @@ -53,18 +53,20 @@ contract CanonicalTransactionChain { function appendL1ToL2Batch() public { dt.TimestampedHash memory l1ToL2Header = l1ToL2Queue.peek(); - if(!safetyQueue.isEmpty()) { - require(l1ToL2Header.timestamp <= safetyQueue.peekTimestamp(), "Must process older SafetyQueue batches first to enforce timestamp monotonicity"); - } + require( + safetyQueue.isEmpty() || l1ToL2Header.timestamp <= safetyQueue.peekTimestamp(), + "Must process older SafetyQueue batches first to enforce timestamp monotonicity" + ); _appendQueueBatch(l1ToL2Header, true); l1ToL2Queue.dequeue(); } function appendSafetyBatch() public { dt.TimestampedHash memory safetyHeader = safetyQueue.peek(); - if(!l1ToL2Queue.isEmpty()) { - require(safetyHeader.timestamp <= l1ToL2Queue.peekTimestamp(), "Must process older L1ToL2Queue batches first to enforce timestamp monotonicity"); - } + require( + l1ToL2Queue.isEmpty() || safetyHeader.timestamp <= l1ToL2Queue.peekTimestamp(), + "Must process older L1ToL2Queue batches first to enforce timestamp monotonicity" + ); _appendQueueBatch(safetyHeader, false); safetyQueue.dequeue(); } @@ -74,9 +76,10 @@ contract CanonicalTransactionChain { bool isL1ToL2Tx ) internal { uint timestamp = timestampedHash.timestamp; - if (timestamp + 
forceInclusionPeriod > now) { - require(authenticateAppend(msg.sender), "Message sender does not have permission to append this batch"); - } + require( + timestamp + forceInclusionPeriod <= now || authenticateAppend(msg.sender), + "Message sender does not have permission to append this batch" + ); lastOVMTimestamp = timestamp; bytes32 elementsMerkleRoot = timestampedHash.txHash; uint numElementsInBatch = 1; @@ -96,12 +99,13 @@ contract CanonicalTransactionChain { require(_txBatch.length > 0, "Cannot submit an empty batch"); require(_timestamp + forceInclusionPeriod > now, "Cannot submit a batch with a timestamp older than the sequencer inclusion period"); require(_timestamp <= now, "Cannot submit a batch with a timestamp in the future"); - if(!l1ToL2Queue.isEmpty()) { - require(_timestamp <= l1ToL2Queue.peekTimestamp(), "Must process older L1ToL2Queue batches first to enforce timestamp monotonicity"); - } - if(!safetyQueue.isEmpty()) { - require(_timestamp <= safetyQueue.peekTimestamp(), "Must process older SafetyQueue batches first to enforce timestamp monotonicity"); - } + require( + l1ToL2Queue.isEmpty() || _timestamp <= l1ToL2Queue.peekTimestamp(), + "Must process older L1ToL2Queue batches first to enforce timestamp monotonicity" + ); + require( + safetyQueue.isEmpty() || _timestamp <= safetyQueue.peekTimestamp(), + "Must process older SafetyQueue batches first to enforce timestamp monotonicity"); require(_timestamp >= lastOVMTimestamp, "Timestamps must monotonically increase"); lastOVMTimestamp = _timestamp; bytes32 batchHeaderHash = keccak256(abi.encodePacked( From 074e183b933a7d31a3316a93873666882f90409a Mon Sep 17 00:00:00 2001 From: Kevin Ho Date: Thu, 4 Jun 2020 14:57:14 -0400 Subject: [PATCH 37/37] alter some unit tests to use randomly generated batches --- .../rollup-contracts/test/helpers/index.ts | 82 ++----------------- .../merklization/RollupMerkleUtils.spec.ts | 10 +-- .../CanonicalTransactionChain.spec.ts | 32 ++++---- .../rollup-list/StateCommitmentChain.spec.ts | 34 ++++---- 4 files changed, 37 insertions(+), 121 deletions(-) diff --git a/packages/rollup-contracts/test/helpers/index.ts b/packages/rollup-contracts/test/helpers/index.ts index d4e3db3ed6664..81614d8539ae8 100644 --- a/packages/rollup-contracts/test/helpers/index.ts +++ b/packages/rollup-contracts/test/helpers/index.ts @@ -1,11 +1,3 @@ -/* Imports */ -import { - keccak256, - abi, - hexStrToBuf, - bufToHexString, -} from '@eth-optimism/core-utils' - /********************************** * Byte String Generation Helpers * *********************************/ @@ -18,74 +10,14 @@ export function makeRepeatedBytes(value: string, length: number): string { return '0x' + sliced } -// Make padded bytes. Bytes are right padded. -export function makePaddedBytes(value: string, length: number): string { - if (value.length > length * 2) { - throw new Error('Value too large to fit in ' + length + ' byte string') - } - const targetLength = length * 2 - while (value.length < (targetLength || 2)) { - value = value + '0' +export function makeRandomBlockOfSize(blockSize: number): string[] { + const block = [] + for (let i = 0; i < blockSize; i++) { + block.push(makeRepeatedBytes('' + Math.floor(Math.random() * 500 + 1), 32)) } - return '0x' + value + return block } -// Make a padded uint. Uints are left padded. 
-export function makePaddedUint(value: string, length: number): string { - if (value.length > length * 2) { - throw new Error('Value too large to fit in ' + length + ' byte string') - } - const targetLength = length * 2 - while (value.length < (targetLength || 2)) { - value = '0' + value - } - return '0x' + value +export function makeRandomBatchOfSize(batchSize: number): string[] { + return makeRandomBlockOfSize(batchSize) } - -/******************************* - * Transition Encoding Helpers * - ******************************/ -// export type Transition = string - -// // Generates some number of dummy transitions -// export function generateNTransitions( -// numTransitions: number -// ): RollupTransition[] { -// const transitions = [] -// for (let i = 0; i < numTransitions; i++) { -// const transfer: TransferTransition = { -// stateRoot: getStateRoot('ab'), -// senderSlotIndex: 2, -// recipientSlotIndex: 2, -// tokenType: 0, -// amount: 1, -// signature: getSignature('01'), -// } -// transitions.push(transfer) -// } -// return transitions -// } - -/**************** - * Misc Helpers * - ***************/ - -export const ZERO_BYTES32 = makeRepeatedBytes('0', 32) -export const ZERO_ADDRESS = makeRepeatedBytes('0', 20) -export const ZERO_UINT32 = makeRepeatedBytes('0', 4) -export const ZERO_SIGNATURE = makeRepeatedBytes('0', 65) - -/* Extra Helpers */ -export const STORAGE_TREE_HEIGHT = 5 -export const AMOUNT_BYTES = 5 -export const getSlot = (storageSlot: string) => - makePaddedUint(storageSlot, STORAGE_TREE_HEIGHT) -export const getAmount = (amount: string) => - makePaddedUint(amount, AMOUNT_BYTES) -export const getAddress = (address: string) => makeRepeatedBytes(address, 20) -export const getSignature = (sig: string) => makeRepeatedBytes(sig, 65) -export const getStateRoot = (bytes: string) => makeRepeatedBytes(bytes, 32) -export const getBytes32 = (bytes: string) => makeRepeatedBytes(bytes, 32) - -export const UNISWAP_ADDRESS = getAddress('00') -export const UNISWAP_STORAGE_SLOT = 0 diff --git a/packages/rollup-contracts/test/merklization/RollupMerkleUtils.spec.ts b/packages/rollup-contracts/test/merklization/RollupMerkleUtils.spec.ts index 8656fab3ecb50..6eb952f7b16ac 100644 --- a/packages/rollup-contracts/test/merklization/RollupMerkleUtils.spec.ts +++ b/packages/rollup-contracts/test/merklization/RollupMerkleUtils.spec.ts @@ -1,7 +1,7 @@ import '../setup' /* Internal Imports */ -import { makeRepeatedBytes } from '../helpers' +import { makeRepeatedBytes, makeRandomBlockOfSize } from '../helpers' /* External Imports */ import { newInMemoryDB, SparseMerkleTreeImpl } from '@eth-optimism/core-db' @@ -36,14 +36,6 @@ async function getNewSMT(treeHeight: number): Promise { return SparseMerkleTreeImpl.create(newInMemoryDB(), undefined, treeHeight) } -function makeRandomBlockOfSize(blockSize: number): string[] { - const block = [] - for (let i = 0; i < blockSize; i++) { - block.push(makeRepeatedBytes('' + Math.floor(Math.random() * 500 + 1), 32)) - } - return block -} - /* Begin tests */ describe('RollupMerkleUtils', () => { const provider = createMockProvider() diff --git a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts index aa65f106cb483..aa0d5d155040b 100644 --- a/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/CanonicalTransactionChain.spec.ts @@ -7,6 +7,7 @@ import { Contract } from 'ethers' /* Internal 
Imports */ import { TxChainBatch, TxQueueBatch } from './RLhelper' +import { makeRandomBatchOfSize } from '../helpers' /* Logging */ const log = getLogger('canonical-tx-chain', true) @@ -218,11 +219,13 @@ describe('CanonicalTransactionChain', () => { }) it('should add multiple batches correctly', async () => { - const numBatches = 10 + const numBatches = 5 + let expectedNumElements = 0 for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { - const cumulativePrevElements = DEFAULT_BATCH.length * batchIndex + const batch = makeRandomBatchOfSize(batchIndex + 1) + const cumulativePrevElements = expectedNumElements const localBatch = await appendAndGenerateSequencerBatch( - DEFAULT_BATCH, + batch, batchIndex, cumulativePrevElements ) @@ -231,11 +234,10 @@ describe('CanonicalTransactionChain', () => { batchIndex ) calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) + expectedNumElements += batch.length } const cumulativeNumElements = await canonicalTxChain.cumulativeNumElements.call() - cumulativeNumElements - .toNumber() - .should.equal(numBatches * DEFAULT_BATCH.length) + cumulativeNumElements.toNumber().should.equal(expectedNumElements) const batchesLength = await canonicalTxChain.getBatchesLength() batchesLength.toNumber().should.equal(numBatches) }) @@ -590,27 +592,21 @@ describe('CanonicalTransactionChain', () => { describe('verifyElement() ', async () => { it('should return true for valid elements for different batches and elements', async () => { - const numBatches = 3 - const batch = [ - '0x1234', - '0x4567', - '0x890a', - '0x4567', - '0x890a', - '0xabcd', - '0x1234', - ] + const numBatches = 4 + let cumulativePrevElements = 0 for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { - const cumulativePrevElements = batch.length * batchIndex + const batchSize = batchIndex * batchIndex + 1 // 1, 2, 5, 10 + const batch = makeRandomBatchOfSize(batchSize) const localBatch = await appendAndGenerateSequencerBatch( batch, batchIndex, cumulativePrevElements ) + cumulativePrevElements += batchSize for ( let elementIndex = 0; elementIndex < batch.length; - elementIndex += 3 + elementIndex++ ) { const element = batch[elementIndex] const position = localBatch.getPosition(elementIndex) diff --git a/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts b/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts index b0d3abd3fb270..8996ec45edc19 100644 --- a/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts +++ b/packages/rollup-contracts/test/rollup-list/StateCommitmentChain.spec.ts @@ -7,6 +7,7 @@ import { Contract } from 'ethers' /* Internal Imports */ import { StateChainBatch } from './RLhelper' +import { makeRandomBatchOfSize } from '../helpers' /* Logging */ const log = getLogger('state-commitment-chain', true) @@ -144,22 +145,23 @@ describe('StateCommitmentChain', () => { }) it('should add multiple batches correctly', async () => { - const numBatches = 5 + const numBatches = 3 + let expectedNumElements = 0 for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { - const cumulativePrevElements = DEFAULT_STATE_BATCH.length * batchIndex + const batch = makeRandomBatchOfSize(batchIndex + 1) + const cumulativePrevElements = expectedNumElements const localBatch = await appendAndGenerateStateBatch( - DEFAULT_STATE_BATCH, + batch, batchIndex, cumulativePrevElements ) const expectedBatchHeaderHash = await localBatch.hashBatchHeader() const calculatedBatchHeaderHash = await stateChain.batches(batchIndex) 
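// [Editor's sketch, not part of the patch] With randomly sized batches the expected
// element count can no longer be derived as DEFAULT_STATE_BATCH.length * batchIndex,
// so these loops keep a running total instead. A minimal, standalone illustration of
// that bookkeeping, using the makeRandomBatchOfSize helper added earlier in this
// patch (batch sizes 1..numBatches are only illustrative):
//
//   import { makeRandomBatchOfSize } from '../helpers'
//
//   let expectedNumElements = 0
//   for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) {
//     const batch = makeRandomBatchOfSize(batchIndex + 1) // batchIndex + 1 random 32-byte words
//     // append `batch` on-chain here, passing expectedNumElements as cumulativePrevElements
//     expectedNumElements += batch.length // running total replaces the fixed multiple
//   }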
calculatedBatchHeaderHash.should.equal(expectedBatchHeaderHash) + expectedNumElements += batch.length } const cumulativeNumElements = await stateChain.cumulativeNumElements.call() - cumulativeNumElements - .toNumber() - .should.equal(numBatches * DEFAULT_STATE_BATCH.length) + cumulativeNumElements.toNumber().should.equal(expectedNumElements) const batchesLength = await stateChain.getBatchesLength() batchesLength.toNumber().should.equal(numBatches) }) @@ -182,28 +184,22 @@ describe('StateCommitmentChain', () => { it('should return true for valid elements for different batches and elements', async () => { // add enough transaction batches so # txs > # state roots await appendTxBatch(DEFAULT_TX_BATCH) - await appendTxBatch(DEFAULT_TX_BATCH) - const numBatches = 3 - const batch = [ - '0x1234', - '0x4567', - '0x890a', - '0x4567', - '0x890a', - '0xabcd', - '0x1234', - ] + + const numBatches = 4 + let cumulativePrevElements = 0 for (let batchIndex = 0; batchIndex < numBatches; batchIndex++) { - const cumulativePrevElements = batch.length * batchIndex + const batchSize = batchIndex * batchIndex + 1 // 1, 2, 5, 10 + const batch = makeRandomBatchOfSize(batchSize) const localBatch = await appendAndGenerateStateBatch( batch, batchIndex, cumulativePrevElements ) + cumulativePrevElements += batchSize for ( let elementIndex = 0; elementIndex < batch.length; - elementIndex += 3 + elementIndex++ ) { const element = batch[elementIndex] const position = localBatch.getPosition(elementIndex)
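// [Editor's sketch, not part of the patch] The position used by these verifyElement()
// checks is, as far as the RLhelper batch objects are used here, the element's index
// across the whole chain: the number of elements in all earlier batches plus its index
// within its own batch. A small worked example under that assumption (the helper name
// is hypothetical, shown only for illustration):
//
//   const globalPosition = (cumulativePrevElements: number, indexInBatch: number): number =>
//     cumulativePrevElements + indexInBatch
//
//   // With the quadratic batch sizes above (1, 2, 5, 10), the batches start at
//   // cumulative offsets 0, 1, 3 and 8, so e.g. element 2 of the third batch sits
//   // at global position 3 + 2 = 5.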