Merged
39 commits
baea8c4
add RollupTxQueue
K-Ho May 14, 2020
dfcff54
remove unused imports
K-Ho May 14, 2020
206b1ae
replace blockNumber with timestamp for canonicalTxChain
K-Ho May 17, 2020
57443a7
add isL1ToL2Tx, fix hashBlockHeader
K-Ho May 17, 2020
7c7bd14
rollupList -> RollupQueue, fix canonicalTransactionChain
K-Ho May 18, 2020
01d29e2
RollupQueue tests
K-Ho May 18, 2020
2e1b9ff
rename block -> batch
K-Ho May 18, 2020
a402776
check messages for erroring tests
K-Ho May 18, 2020
004d700
add timestamp to RollupQueue batches
K-Ho May 18, 2020
81d1099
alter dequeue to only support single batch dequeue
K-Ho May 18, 2020
5d7f0c9
integrate l1ToL2Queue into canonical chain
K-Ho May 20, 2020
a8bdd90
remove extraneous comments, working batch header verification
K-Ho May 20, 2020
e61c794
constrain rollupQueue to single tx batches
K-Ho May 20, 2020
8b87ebb
add main appendL1ToL2TxBatch functionality
K-Ho May 20, 2020
a09eae3
added appendTxBatch timestamp functionality
K-Ho May 21, 2020
43cd771
clean up comments
K-Ho May 21, 2020
55e7583
getFrontBatch -> peek
K-Ho May 21, 2020
72f8422
add tests for appendTxBatch timestamp protection
K-Ho May 21, 2020
d92c53c
add timestamp unit tests
K-Ho May 21, 2020
bee8426
clean up tests
K-Ho May 22, 2020
81f5557
clean up canonicalTxChain tests
K-Ho May 22, 2020
b2c3014
clean up naming, add verifyElement test
K-Ho May 22, 2020
823bf62
add verifyElement test for L1toL2Batch
K-Ho May 22, 2020
534bc7a
add safetyTxQueue
K-Ho May 22, 2020
49d4523
add appendSafetyBatch and tests
K-Ho May 22, 2020
b3bd0c6
add verifyElement test for SafetyBatch
K-Ho May 22, 2020
7e14131
fix timestamp test
K-Ho May 23, 2020
8c49094
Merge branch 'master' into YAS-407/RollupChain/SlowQueue
K-Ho May 23, 2020
74caa38
use TestUtils to assert revert messages
K-Ho May 23, 2020
b3970ba
added timestamp unit tests
K-Ho May 25, 2020
f320843
clean up unit tests
K-Ho May 25, 2020
a8a6e5e
add state commitment chain, clean up canonical tx chain tests
K-Ho May 28, 2020
a8c6021
add state chain verify element
K-Ho May 28, 2020
df76432
add deleteAfterInclusive to state commitment chain
K-Ho Jun 1, 2020
705c154
clean up unit tests
K-Ho Jun 1, 2020
576c035
renaming functions, add more timestamp unit tests
K-Ho Jun 3, 2020
a020a1c
fix if-then-require cases
K-Ho Jun 4, 2020
074e183
alter some unit tests to use randomly generated batches
K-Ho Jun 4, 2020
592add8
Merge branch 'master' into YAS-407/RollupChain/SlowQueue
K-Ho Jun 4, 2020
51 changes: 40 additions & 11 deletions in packages/rollup-contracts/contracts/CanonicalTransactionChain.sol
@@ -5,12 +5,14 @@ pragma experimental ABIEncoderV2;
import {DataTypes as dt} from "./DataTypes.sol";
import {RollupMerkleUtils} from "./RollupMerkleUtils.sol";
import {L1ToL2TransactionQueue} from "./L1ToL2TransactionQueue.sol";
import {SafetyTransactionQueue} from "./SafetyTransactionQueue.sol";

contract CanonicalTransactionChain {
address public sequencer;
uint public forceInclusionPeriod;
RollupMerkleUtils public merkleUtils;
L1ToL2TransactionQueue public l1ToL2Queue;
SafetyTransactionQueue public safetyQueue;
uint public cumulativeNumElements;
bytes32[] public batches;
uint public lastOVMTimestamp;
@@ -24,6 +26,7 @@ contract CanonicalTransactionChain {
merkleUtils = RollupMerkleUtils(_rollupMerkleUtilsAddress);
sequencer = _sequencer;
l1ToL2Queue = new L1ToL2TransactionQueue(_rollupMerkleUtilsAddress, _l1ToL2TransactionPasserAddress, address(this));
safetyQueue = new SafetyTransactionQueue(_rollupMerkleUtilsAddress, address(this));
forceInclusionPeriod =_forceInclusionPeriod;
lastOVMTimestamp = 0;
}
@@ -49,34 +52,60 @@
}

function appendL1ToL2Batch() public {
dt.TimestampedHash memory timestampedHash = l1ToL2Queue.peek();
dt.TimestampedHash memory l1ToL2Header = l1ToL2Queue.peek();
require(
safetyQueue.isEmpty() || l1ToL2Header.timestamp <= safetyQueue.peekTimestamp(),
"Must process older SafetyQueue batches first to enforce timestamp monotonicity"
);
_appendQueueBatch(l1ToL2Header, true);
l1ToL2Queue.dequeue();
}

function appendSafetyBatch() public {
dt.TimestampedHash memory safetyHeader = safetyQueue.peek();
require(
l1ToL2Queue.isEmpty() || safetyHeader.timestamp <= l1ToL2Queue.peekTimestamp(),
"Must process older L1ToL2Queue batches first to enforce timestamp monotonicity"
);
_appendQueueBatch(safetyHeader, false);
safetyQueue.dequeue();
}

function _appendQueueBatch(
dt.TimestampedHash memory timestampedHash,
bool isL1ToL2Tx
) internal {
uint timestamp = timestampedHash.timestamp;
if (timestamp + forceInclusionPeriod > now) {
require(authenticateAppend(msg.sender), "Message sender does not have permission to append this batch");
}
require(
timestamp + forceInclusionPeriod <= now || authenticateAppend(msg.sender),
"Message sender does not have permission to append this batch"
);
lastOVMTimestamp = timestamp;
bytes32 elementsMerkleRoot = timestampedHash.txHash;
uint numElementsInBatch = 1;
bytes32 batchHeaderHash = keccak256(abi.encodePacked(
timestamp,
true, // isL1ToL2Tx
isL1ToL2Tx,
elementsMerkleRoot,
numElementsInBatch,
cumulativeNumElements // cumulativePrevElements
));
batches.push(batchHeaderHash);
cumulativeNumElements += numElementsInBatch;
l1ToL2Queue.dequeue();
}

function appendTransactionBatch(bytes[] memory _txBatch, uint _timestamp) public {
function appendSequencerBatch(bytes[] memory _txBatch, uint _timestamp) public {
require(authenticateAppend(msg.sender), "Message sender does not have permission to append a batch");
require(_txBatch.length > 0, "Cannot submit an empty batch");
require(_timestamp + forceInclusionPeriod > now, "Cannot submit a batch with a timestamp older than the sequencer inclusion period");
require(_timestamp <= now, "Cannot submit a batch with a timestamp in the future");
if(!l1ToL2Queue.isEmpty()) {
require(_timestamp <= l1ToL2Queue.peekTimestamp(), "Must process older queued batches first to enforce timestamp monotonicity");
}
require(
l1ToL2Queue.isEmpty() || _timestamp <= l1ToL2Queue.peekTimestamp(),
"Must process older L1ToL2Queue batches first to enforce timestamp monotonicity"
);
require(
safetyQueue.isEmpty() || _timestamp <= safetyQueue.peekTimestamp(),
"Must process older SafetyQueue batches first to enforce timestamp monotonicity");
require(_timestamp >= lastOVMTimestamp, "Timestamps must monotonically increase");
lastOVMTimestamp = _timestamp;
bytes32 batchHeaderHash = keccak256(abi.encodePacked(
@@ -94,7 +123,7 @@
function verifyElement(
bytes memory _element, // the element of the list being proven
uint _position, // the position in the list of the element being proven
dt.ElementInclusionProof memory _inclusionProof // inclusion proof in the rollup batch
dt.TxElementInclusionProof memory _inclusionProof // inclusion proof in the rollup batch
) public view returns (bool) {
// For convenience, store the batchHeader
dt.TxChainBatchHeader memory batchHeader = _inclusionProof.batchHeader;
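The hunks above change the append rules: appendL1ToL2Batch and appendSafetyBatch each refuse to run while the other queue holds an older head, and the renamed appendSequencerBatch rejects timestamps that are stale, in the future, newer than either queue head, or older than lastOVMTimestamp. A minimal TypeScript sketch of those timestamp checks (not part of the diff; QueueHeads, canAppendSequencerBatch and nextQueueToAppend are hypothetical names, and sender authentication plus the non-empty-batch check are left out):

interface QueueHeads {
  l1ToL2Timestamp?: number // undefined when the L1-to-L2 queue is empty
  safetyTimestamp?: number // undefined when the safety queue is empty
}

// Mirrors the require() chain in appendSequencerBatch.
function canAppendSequencerBatch(
  timestamp: number,
  now: number,
  forceInclusionPeriod: number,
  lastOVMTimestamp: number,
  heads: QueueHeads
): boolean {
  if (timestamp + forceInclusionPeriod <= now) return false // older than the inclusion period
  if (timestamp > now) return false // in the future
  if (heads.l1ToL2Timestamp !== undefined && timestamp > heads.l1ToL2Timestamp) return false
  if (heads.safetyTimestamp !== undefined && timestamp > heads.safetyTimestamp) return false
  return timestamp >= lastOVMTimestamp // timestamps must monotonically increase
}

// Mirrors the cross-queue requires in appendL1ToL2Batch / appendSafetyBatch:
// the queue with the older head must be appended first; ties may go either way.
function nextQueueToAppend(heads: QueueHeads): 'l1ToL2' | 'safety' | 'none' {
  if (heads.l1ToL2Timestamp === undefined && heads.safetyTimestamp === undefined) return 'none'
  if (heads.safetyTimestamp === undefined) return 'l1ToL2'
  if (heads.l1ToL2Timestamp === undefined) return 'safety'
  return heads.l1ToL2Timestamp <= heads.safetyTimestamp ? 'l1ToL2' : 'safety'
}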
21 changes: 17 additions & 4 deletions in packages/rollup-contracts/contracts/DataTypes.sol
@@ -34,11 +34,24 @@ contract DataTypes {
address l1MessageSender;
}

struct ElementInclusionProof {
uint batchIndex; // index in batches array (first batch has batchNumber of 0)
struct TxElementInclusionProof {
uint batchIndex;
TxChainBatchHeader batchHeader;
uint indexInBatch; // used to verify inclusion of the element in elementsMerkleRoot
bytes32[] siblings; // used to verify inclusion of the element in elementsMerkleRoot
uint indexInBatch;
bytes32[] siblings;
}

struct StateElementInclusionProof {
uint batchIndex;
StateChainBatchHeader batchHeader;
uint indexInBatch;
bytes32[] siblings;
}

struct StateChainBatchHeader {
bytes32 elementsMerkleRoot;
uint numElementsInBatch;
uint cumulativePrevElements;
}

struct TxChainBatchHeader {
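Callers of the two verifyElement functions build these structs off-chain before ABI-encoding them. A sketch of how the new state-chain proof might be typed in the TypeScript tests (field names follow DataTypes.sol; the interfaces themselves are illustrative and not part of this diff):

interface StateChainBatchHeader {
  elementsMerkleRoot: string // 0x-prefixed bytes32
  numElementsInBatch: number
  cumulativePrevElements: number
}

interface StateElementInclusionProof {
  batchIndex: number // index into the batches array
  batchHeader: StateChainBatchHeader
  indexInBatch: number // position of the element under elementsMerkleRoot
  siblings: string[] // Merkle siblings, as bytes32 hex strings
}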
20 changes: 20 additions & 0 deletions in packages/rollup-contracts/contracts/SafetyTransactionQueue.sol
@@ -0,0 +1,20 @@
pragma solidity ^0.5.0;
pragma experimental ABIEncoderV2;

/* Internal Imports */
import {RollupQueue} from "./RollupQueue.sol";

contract SafetyTransactionQueue is RollupQueue {
address public canonicalTransactionChain;

constructor(
address _rollupMerkleUtilsAddress,
address _canonicalTransactionChain
) RollupQueue(_rollupMerkleUtilsAddress) public {
canonicalTransactionChain = _canonicalTransactionChain;
}

function authenticateDequeue(address _sender) public view returns (bool) {
return _sender == canonicalTransactionChain;
}
}
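Like the L1ToL2TransactionQueue, the SafetyTransactionQueue is deployed by the CanonicalTransactionChain constructor and overrides authenticateDequeue so that, assuming the RollupQueue base contract checks authenticateDequeue(msg.sender) inside dequeue, only the chain contract can pop a batch once it has appended it. An illustrative in-memory model of that pattern (the AuthenticatedQueue class is hypothetical, not repo code):

class AuthenticatedQueue<T> {
  private elements: T[] = []

  constructor(private readonly canonicalTransactionChain: string) {}

  enqueue(element: T): void {
    this.elements.push(element)
  }

  // Mirrors authenticateDequeue: only the canonical transaction chain may dequeue.
  dequeue(sender: string): T {
    if (sender !== this.canonicalTransactionChain) {
      throw new Error('Only the canonical transaction chain can dequeue')
    }
    const front = this.elements.shift()
    if (front === undefined) {
      throw new Error('Cannot dequeue from an empty queue')
    }
    return front
  }
}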
84 changes: 84 additions & 0 deletions in packages/rollup-contracts/contracts/StateCommitmentChain.sol
@@ -0,0 +1,84 @@
pragma solidity ^0.5.0;
pragma experimental ABIEncoderV2;

/* Internal Imports */
import {DataTypes as dt} from "./DataTypes.sol";
import {RollupMerkleUtils} from "./RollupMerkleUtils.sol";
import {CanonicalTransactionChain} from "./CanonicalTransactionChain.sol";

contract StateCommitmentChain {
CanonicalTransactionChain canonicalTransactionChain;
RollupMerkleUtils public merkleUtils;
address public fraudVerifier;
uint public cumulativeNumElements;
bytes32[] public batches;

constructor(
address _rollupMerkleUtilsAddress,
address _canonicalTransactionChain,
address _fraudVerifier
) public {
merkleUtils = RollupMerkleUtils(_rollupMerkleUtilsAddress);
canonicalTransactionChain = CanonicalTransactionChain(_canonicalTransactionChain);
fraudVerifier = _fraudVerifier;
}

function getBatchesLength() public view returns (uint) {
return batches.length;
}

function hashBatchHeader(
dt.StateChainBatchHeader memory _batchHeader
) public pure returns (bytes32) {
return keccak256(abi.encodePacked(
_batchHeader.elementsMerkleRoot,
_batchHeader.numElementsInBatch,
_batchHeader.cumulativePrevElements
));
}

function appendStateBatch(bytes[] memory _stateBatch) public {
require(cumulativeNumElements + _stateBatch.length <= canonicalTransactionChain.cumulativeNumElements(),
"Cannot append more state commitments than total number of transactions in CanonicalTransactionChain");
require(_stateBatch.length > 0, "Cannot submit an empty state commitment batch");
bytes32 batchHeaderHash = keccak256(abi.encodePacked(
merkleUtils.getMerkleRoot(_stateBatch), // elementsMerkleRoot
_stateBatch.length, // numElementsInBatch
cumulativeNumElements // cumulativeNumElements
));
batches.push(batchHeaderHash);
cumulativeNumElements += _stateBatch.length;
}

// verifies an element is in the current list at the given position
function verifyElement(
bytes memory _element, // the element of the list being proven
uint _position, // the position in the list of the element being proven
dt.StateElementInclusionProof memory _inclusionProof
) public view returns (bool) {
dt.StateChainBatchHeader memory batchHeader = _inclusionProof.batchHeader;
if(_position != _inclusionProof.indexInBatch +
batchHeader.cumulativePrevElements)
return false;
if (!merkleUtils.verify(
batchHeader.elementsMerkleRoot,
_element,
_inclusionProof.indexInBatch,
_inclusionProof.siblings
)) return false;
//compare computed batch header with the batch header in the list.
return hashBatchHeader(batchHeader) == batches[_inclusionProof.batchIndex];
}

function deleteAfterInclusive(
uint _batchIndex,
dt.StateChainBatchHeader memory _batchHeader
) public {
require(msg.sender == fraudVerifier, "Only FraudVerifier has permission to delete state batches");
require(_batchIndex < batches.length, "Cannot delete batches outside of valid range");
bytes32 calculatedBatchHeaderHash = hashBatchHeader(_batchHeader);
require(calculatedBatchHeaderHash == batches[_batchIndex], "Calculated batch header is different than expected batch header");
batches.length = _batchIndex;
cumulativeNumElements = _batchHeader.cumulativePrevElements;
}
}
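deleteAfterInclusive only succeeds when the FraudVerifier supplies the exact header whose hash was stored in batches[_batchIndex], so an off-chain caller needs to reproduce hashBatchHeader. A sketch using ethers' solidityKeccak256, which packs values the same way as abi.encodePacked (the helper name is illustrative; only the ethers import is real):

import { utils } from 'ethers'

// Recomputes keccak256(abi.encodePacked(bytes32, uint, uint)) as hashBatchHeader does.
function hashStateBatchHeader(
  elementsMerkleRoot: string, // 0x-prefixed bytes32
  numElementsInBatch: number,
  cumulativePrevElements: number
): string {
  return utils.solidityKeccak256(
    ['bytes32', 'uint256', 'uint256'],
    [elementsMerkleRoot, numElementsInBatch, cumulativePrevElements]
  )
}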
82 changes: 7 additions & 75 deletions in packages/rollup-contracts/test/helpers/index.ts
@@ -1,11 +1,3 @@
/* Imports */
import {
keccak256,
abi,
hexStrToBuf,
bufToHexString,
} from '@eth-optimism/core-utils'

/**********************************
* Byte String Generation Helpers *
*********************************/
@@ -18,74 +10,14 @@ export function makeRepeatedBytes(value: string, length: number): string {
return '0x' + sliced
}

// Make padded bytes. Bytes are right padded.
export function makePaddedBytes(value: string, length: number): string {
if (value.length > length * 2) {
throw new Error('Value too large to fit in ' + length + ' byte string')
}
const targetLength = length * 2
while (value.length < (targetLength || 2)) {
value = value + '0'
export function makeRandomBlockOfSize(blockSize: number): string[] {
const block = []
for (let i = 0; i < blockSize; i++) {
block.push(makeRepeatedBytes('' + Math.floor(Math.random() * 500 + 1), 32))
}
return '0x' + value
return block
}

// Make a padded uint. Uints are left padded.
export function makePaddedUint(value: string, length: number): string {
if (value.length > length * 2) {
throw new Error('Value too large to fit in ' + length + ' byte string')
}
const targetLength = length * 2
while (value.length < (targetLength || 2)) {
value = '0' + value
}
return '0x' + value
export function makeRandomBatchOfSize(batchSize: number): string[] {
return makeRandomBlockOfSize(batchSize)
}

/*******************************
* Transition Encoding Helpers *
******************************/
// export type Transition = string

// // Generates some number of dummy transitions
// export function generateNTransitions(
// numTransitions: number
// ): RollupTransition[] {
// const transitions = []
// for (let i = 0; i < numTransitions; i++) {
// const transfer: TransferTransition = {
// stateRoot: getStateRoot('ab'),
// senderSlotIndex: 2,
// recipientSlotIndex: 2,
// tokenType: 0,
// amount: 1,
// signature: getSignature('01'),
// }
// transitions.push(transfer)
// }
// return transitions
// }

/****************
* Misc Helpers *
***************/

export const ZERO_BYTES32 = makeRepeatedBytes('0', 32)
export const ZERO_ADDRESS = makeRepeatedBytes('0', 20)
export const ZERO_UINT32 = makeRepeatedBytes('0', 4)
export const ZERO_SIGNATURE = makeRepeatedBytes('0', 65)

/* Extra Helpers */
export const STORAGE_TREE_HEIGHT = 5
export const AMOUNT_BYTES = 5
export const getSlot = (storageSlot: string) =>
makePaddedUint(storageSlot, STORAGE_TREE_HEIGHT)
export const getAmount = (amount: string) =>
makePaddedUint(amount, AMOUNT_BYTES)
export const getAddress = (address: string) => makeRepeatedBytes(address, 20)
export const getSignature = (sig: string) => makeRepeatedBytes(sig, 65)
export const getStateRoot = (bytes: string) => makeRepeatedBytes(bytes, 32)
export const getBytes32 = (bytes: string) => makeRepeatedBytes(bytes, 32)

export const UNISWAP_ADDRESS = getAddress('00')
export const UNISWAP_STORAGE_SLOT = 0
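The padded-bytes helpers and dummy-transition scaffolding are dropped in favour of the shared makeRandomBlockOfSize / makeRandomBatchOfSize generators, which the specs below now import instead of defining locally. A small usage sketch (the relative import path assumes a file under the test directory):

import { makeRandomBatchOfSize } from '../helpers'

// Build a batch of eight random 32-byte elements for an append/verify test.
const batch: string[] = makeRandomBatchOfSize(8)
console.log(batch.length) // 8
console.log(batch[0]) // a 0x-prefixed, 32-byte repeated random pattern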
@@ -1,7 +1,7 @@
import '../setup'

/* Internal Imports */
import { makeRepeatedBytes } from '../helpers'
import { makeRepeatedBytes, makeRandomBlockOfSize } from '../helpers'

/* External Imports */
import { newInMemoryDB, SparseMerkleTreeImpl } from '@eth-optimism/core-db'
@@ -36,14 +36,6 @@ async function getNewSMT(treeHeight: number): Promise<SparseMerkleTreeImpl> {
return SparseMerkleTreeImpl.create(newInMemoryDB(), undefined, treeHeight)
}

function makeRandomBlockOfSize(blockSize: number): string[] {
const block = []
for (let i = 0; i < blockSize; i++) {
block.push(makeRepeatedBytes('' + Math.floor(Math.random() * 500 + 1), 32))
}
return block
}

/* Begin tests */
describe('RollupMerkleUtils', () => {
const provider = createMockProvider()