diff --git a/packages/rollup-contracts/contracts/utils/BytesLib.sol b/packages/rollup-contracts/contracts/utils/BytesLib.sol new file mode 100644 index 0000000000000..0694c82eb96ad --- /dev/null +++ b/packages/rollup-contracts/contracts/utils/BytesLib.sol @@ -0,0 +1,206 @@ +/* + * @title Solidity Bytes Arrays Utils + * @author Gonçalo Sá + * + * @dev Bytes tightly packed arrays utility library for ethereum contracts written in Solidity. + * The library lets you concatenate, slice and type cast bytes arrays both in memory and storage. + */ +pragma solidity ^0.5.0; + + +library BytesLib { + function concat( + bytes memory _preBytes, + bytes memory _postBytes + ) + internal + pure + returns (bytes memory) + { + bytes memory tempBytes; + + assembly { + // Get a location of some free memory and store it in tempBytes as + // Solidity does for memory variables. + tempBytes := mload(0x40) + + // Store the length of the first bytes array at the beginning of + // the memory for tempBytes. + let length := mload(_preBytes) + mstore(tempBytes, length) + + // Maintain a memory counter for the current write location in the + // temp bytes array by adding the 32 bytes for the array length to + // the starting location. + let mc := add(tempBytes, 0x20) + // Stop copying when the memory counter reaches the length of the + // first bytes array. + let end := add(mc, length) + + for { + // Initialize a copy counter to the start of the _preBytes data, + // 32 bytes into its memory. + let cc := add(_preBytes, 0x20) + } lt(mc, end) { + // Increase both counters by 32 bytes each iteration. + mc := add(mc, 0x20) + cc := add(cc, 0x20) + } { + // Write the _preBytes data into the tempBytes memory 32 bytes + // at a time. + mstore(mc, mload(cc)) + } + + // Add the length of _postBytes to the current length of tempBytes + // and store it as the new length in the first 32 bytes of the + // tempBytes memory. 
+ length := mload(_postBytes) + mstore(tempBytes, add(length, mload(tempBytes))) + + // Move the memory counter back from a multiple of 0x20 to the + // actual end of the _preBytes data. + mc := end + // Stop copying when the memory counter reaches the new combined + // length of the arrays. + end := add(mc, length) + + for { + let cc := add(_postBytes, 0x20) + } lt(mc, end) { + mc := add(mc, 0x20) + cc := add(cc, 0x20) + } { + mstore(mc, mload(cc)) + } + + // Update the free-memory pointer by padding our last write location + // to 32 bytes: add 31 bytes to the end of tempBytes to move to the + // next 32 byte block, then round down to the nearest multiple of + // 32. If the sum of the length of the two arrays is zero then add + // one before rounding down to leave a blank 32 bytes (the length block with 0). + mstore(0x40, and( + add(add(end, iszero(add(length, mload(_preBytes)))), 31), + not(31) // Round down to the nearest 32 bytes. + )) + } + + return tempBytes; + } + + function slice( + bytes memory _bytes, + uint256 _start, + uint256 _length + ) + internal + pure + returns (bytes memory) + { + require(_bytes.length >= (_start + _length), "Read out of bounds"); + + bytes memory tempBytes; + + assembly { + switch iszero(_length) + case 0 { + // Get a location of some free memory and store it in tempBytes as + // Solidity does for memory variables. + tempBytes := mload(0x40) + + // The first word of the slice result is potentially a partial + // word read from the original array. To read it, we calculate + // the length of that partial word and start copying that many + // bytes into the array. The first word we copy will start with + // data we don't care about, but the last `lengthmod` bytes will + // land at the beginning of the contents of the new array. When + // we're done copying, we overwrite the full first word with + // the actual length of the slice. 
+ let lengthmod := and(_length, 31) + + // The multiplication in the next line is necessary + // because when slicing multiples of 32 bytes (lengthmod == 0) + // the following copy loop was copying the origin's length + // and then ending prematurely not copying everything it should. + let mc := add(add(tempBytes, lengthmod), mul(0x20, iszero(lengthmod))) + let end := add(mc, _length) + + for { + // The multiplication in the next line has the same exact purpose + // as the one above. + let cc := add(add(add(_bytes, lengthmod), mul(0x20, iszero(lengthmod))), _start) + } lt(mc, end) { + mc := add(mc, 0x20) + cc := add(cc, 0x20) + } { + mstore(mc, mload(cc)) + } + + mstore(tempBytes, _length) + + //update free-memory pointer + //allocating the array padded to 32 bytes like the compiler does now + mstore(0x40, and(add(mc, 31), not(31))) + } + //if we want a zero-length slice let's just return a zero-length array + default { + tempBytes := mload(0x40) + + mstore(0x40, add(tempBytes, 0x20)) + } + } + + return tempBytes; + } + + function slice( + bytes memory _bytes, + uint256 _start + ) + internal + pure + returns (bytes memory) + { + if (_bytes.length - _start == 0) { + return bytes(''); + } + + return slice(_bytes, _start, _bytes.length - _start); + } + + function toBytes32(bytes memory _bytes) internal pure returns (bytes32) { + bytes32 ret; + assembly { + ret := mload(add(_bytes, 32)) + } + return ret; + } + + function toUint256(bytes memory _bytes) internal pure returns (uint256) { + return uint256(toBytes32(_bytes)); + } + + function toNibbles(bytes memory _bytes) internal pure returns (bytes memory) { + bytes memory nibbles = new bytes(_bytes.length * 2); + + for (uint256 i = 0; i < _bytes.length; i++) { + nibbles[i * 2] = _bytes[i] >> 4; + nibbles[i * 2 + 1] = bytes1(uint8(_bytes[i]) % 16); + } + + return nibbles; + } + + function fromNibbles(bytes memory _bytes) internal pure returns (bytes memory) { + bytes memory ret = new bytes(_bytes.length / 2); + + for 
(uint256 i = 0; i < ret.length; i++) { + ret[i] = (_bytes[i * 2] << 4) | (_bytes[i * 2 + 1]); + } + + return ret; + } + + function equal(bytes memory _bytes, bytes memory _other) internal pure returns (bool) { + return keccak256(_bytes) == keccak256(_other); + } +} \ No newline at end of file diff --git a/packages/rollup-contracts/contracts/utils/MerkleTrie.sol b/packages/rollup-contracts/contracts/utils/MerkleTrie.sol new file mode 100644 index 0000000000000..0f96e609daca4 --- /dev/null +++ b/packages/rollup-contracts/contracts/utils/MerkleTrie.sol @@ -0,0 +1,767 @@ +// SPDX-License-Identifier: UNLICENSED +pragma solidity ^0.5.0; + +import './BytesLib.sol'; +import './RLPReader.sol'; +import './RLPWriter.sol'; + +contract MerkleTrie { + // TREE_RADIX determines the number of elements per branch node. + uint256 constant TREE_RADIX = 16; + // Branch nodes have TREE_RADIX elements plus an additional `value` slot. + uint256 constant BRANCH_NODE_LENGTH = TREE_RADIX + 1; + // Leaf nodes and extension nodes always have two elements, a `path` and a `value`. + uint256 constant LEAF_OR_EXTENSION_NODE_LENGTH = 2; + + // Prefixes are prepended to the `path` within a leaf or extension node and + // allow us to differentiate between the two node types. `ODD` or `EVEN` is + // determined by the number of nibbles within the unprefixed `path`. If the + // number of nibbles if even, we need to insert an extra padding nibble so + // the resulting prefixed `path` has an even number of nibbles. + uint8 constant PREFIX_EXTENSION_EVEN = 0; + uint8 constant PREFIX_EXTENSION_ODD = 1; + uint8 constant PREFIX_LEAF_EVEN = 2; + uint8 constant PREFIX_LEAF_ODD = 3; + + // Just a utility constant. RLP represents `NULL` as 0x80. 
+ bytes1 constant RLP_NULL = bytes1(0x80); + + enum NodeType { + BranchNode, + ExtensionNode, + LeafNode + } + + struct TrieNode { + bytes encoded; + RLPReader.RLPItem[] decoded; + } + + + /* + * Public Functions + */ + + /** + * @notice Verifies a proof that a given key/value pair is present in the + * Merkle trie. + * @param _key Key of the node to search for, as a hex string. + * @param _value Value of the node to search for, as a hex string. + * @param _proof Merkle trie inclusion proof for the desired node. Unlike + * traditional Merkle trees, this proof is executed top-down and consists + * of a list of RLP-encoded nodes that make a path down to the target node. + * @param _root Known root of the Merkle trie. Used to verify that the + * included proof is correctly constructed. + * @return `true` if the k/v pair exists in the trie, `false` otherwise. + */ + function verifyInclusionProof( + bytes memory _key, + bytes memory _value, + bytes memory _proof, + bytes32 _root + ) public pure returns (bool) { + return verifyProof(_key, _value, _proof, _root, true); + } + + /** + * @notice Verifies a proof that a given key/value pair is *not* present in + * the Merkle trie. + * @param _key Key of the node to search for, as a hex string. + * @param _value Value of the node to search for, as a hex string. + * @param _proof Merkle trie inclusion proof for the node *nearest* the + * target node. We effectively need to show that either the key exists and + * its value differs, or the key does not exist at all. + * @param _root Known root of the Merkle trie. Used to verify that the + * included proof is correctly constructed. + * @return `true` if the k/v pair is absent in the trie, `false` otherwise. 
+ */ + function verifyExclusionProof( + bytes memory _key, + bytes memory _value, + bytes memory _proof, + bytes32 _root + ) public pure returns (bool) { + return verifyProof(_key, _value, _proof, _root, false); + } + + /** + * @notice Updates a Merkle trie and returns a new root hash. + * @param _key Key of the node to update, as a hex string. + * @param _value Value of the node to update, as a hex string. + * @param _proof Merkle trie inclusion proof for the node *nearest* the + * target node. If the key exists, we can simply update the value. + * Otherwise, we need to modify the trie to handle the new k/v pair. + * @param _root Known root of the Merkle trie. Used to verify that the + * included proof is correctly constructed. + * @return Root hash of the newly constructed trie. + */ + function update( + bytes memory _key, + bytes memory _value, + bytes memory _proof, + bytes32 _root + ) public pure returns (bytes32) { + TrieNode[] memory proof = parseProof(_proof); + (uint256 pathLength, bytes memory keyRemainder, ) = walkNodePath(proof, _key, _root); + + TrieNode[] memory newPath = getNewPath(proof, pathLength, keyRemainder, _value); + + return getUpdatedTrieRoot(newPath, _key); + } + + + /* + * Internal Functions + */ + + /** + * @notice Utility function that handles verification of inclusion or + * exclusion proofs. Since the verification methods are almost identical, + * it's easier to shove this into a single function. + * @param _key Key of the node to search for, as a hex string. + * @param _value Value of the node to search for, as a hex string. + * @param _proof Merkle trie inclusion proof for the node *nearest* the + * target node. If we're proving explicit inclusion, the nearest node + * should be the target node. + * @param _root Known root of the Merkle trie. Used to verify that the + * included proof is correctly constructed. + * @param _inclusion Whether to check for inclusion or exclusion. 
+ * @return `true` if the k/v pair is (in/not in) the trie, `false` otherwise. + */ + function verifyProof( + bytes memory _key, + bytes memory _value, + bytes memory _proof, + bytes32 _root, + bool _inclusion + ) internal pure returns (bool) { + TrieNode[] memory proof = parseProof(_proof); + (uint256 pathLength, bytes memory keyRemainder, bool isFinalNode) = walkNodePath(proof, _key, _root); + + if (_inclusion) { + // Included leaf nodes should have no key remainder, values should match. + return ( + keyRemainder.length == 0 && + BytesLib.equal(getNodeValue(proof[pathLength - 1]), _value) + ); + } else { + // If there's no key remainder then a leaf with the given key exists and the value should differ. + // Otherwise, we need to make sure that we've hit a dead end. + return ( + (keyRemainder.length == 0 && !BytesLib.equal(getNodeValue(proof[pathLength - 1]), _value)) || + (keyRemainder.length != 0 && isFinalNode) + ); + } + } + + /** + * @notice Walks through a proof using a provided key. + * @param _proof Inclusion proof to walk through. + * @param _key Key to use for the walk. + * @param _root Known root of the trie. + * @return ( + * Length of the final path; + * Portion of the key remaining after the walk; + * Whether or not we've hit a dead end; + * ) + */ + function walkNodePath( + TrieNode[] memory _proof, + bytes memory _key, + bytes32 _root + ) internal pure returns ( + uint256, + bytes memory, + bool + ) { + uint256 pathLength = 0; + bytes memory key = BytesLib.toNibbles(_key); + + bytes32 currentNodeID = _root; + uint256 currentKeyIndex = 0; + uint256 currentKeyIncrement = 0; + TrieNode memory currentNode; + + // Proof is top-down, so we start at the first element (root). + for (uint256 i = 0; i < _proof.length; i++) { + currentNode = _proof[i]; + currentKeyIndex += currentKeyIncrement; + + // Keep track of the proof elements we actually need. + // It's expensive to resize arrays, so this simply reduces gas costs. 
+ pathLength += 1; + + if (currentKeyIndex == 0) { + // First proof element is always the root node. + require( + keccak256(currentNode.encoded) == currentNodeID, + "Invalid root hash" + ); + } else if (currentNode.encoded.length >= 32) { + // Nodes 32 bytes or larger are hashed inside branch nodes. + require( + keccak256(currentNode.encoded) == currentNodeID, + "Invalid large internal hash" + ); + } else { + // Nodes smaller than 31 bytes aren't hashed. + require( + BytesLib.toBytes32(currentNode.encoded) == currentNodeID, + "Invalid internal node hash" + ); + } + + if (currentNode.decoded.length == BRANCH_NODE_LENGTH) { + if (currentKeyIndex == key.length) { + // We've hit the end of the key, meaning the value should be within this branch node. + break; + } else { + // We're not at the end of the key yet. + // Figure out what the next node ID should be and continue. + uint8 branchKey = uint8(key[currentKeyIndex]); + RLPReader.RLPItem memory nextNode = currentNode.decoded[branchKey]; + currentNodeID = getNodeID(nextNode); + currentKeyIncrement = 1; + continue; + } + } else if (currentNode.decoded.length == LEAF_OR_EXTENSION_NODE_LENGTH) { + bytes memory path = getNodePath(currentNode); + uint8 prefix = uint8(path[0]); + uint8 offset = 2 - prefix % 2; + bytes memory pathRemainder = BytesLib.slice(path, offset); + bytes memory keyRemainder = BytesLib.slice(key, currentKeyIndex); + uint256 sharedNibbleLength = getSharedNibbleLength(pathRemainder, keyRemainder); + + if (prefix == PREFIX_LEAF_EVEN || prefix == PREFIX_LEAF_ODD) { + if ( + pathRemainder.length == sharedNibbleLength && + keyRemainder.length == sharedNibbleLength + ) { + // The key within this leaf matches our key exactly. + // Increment the key index to reflect that we have no remainder. + currentKeyIndex += sharedNibbleLength; + } + + // We've hit a leaf node, so our next node should be NULL. 
+ currentNodeID = bytes32(RLP_NULL); + break; + } else if (prefix == PREFIX_EXTENSION_EVEN || prefix == PREFIX_EXTENSION_ODD) { + if (sharedNibbleLength == 0) { + // Our extension node doesn't share any part of our key. + // We've hit the end of this path, updates will need to modify this extension. + currentNodeID = bytes32(RLP_NULL); + break; + } else { + // Our extension shares some nibbles. + // Carry on to the next node. + currentNodeID = getNodeID(currentNode.decoded[1]); + currentKeyIncrement = sharedNibbleLength; + continue; + } + } + } + } + + // If our node ID is NULL, then we're at a dead end. + bool isFinalNode = currentNodeID == bytes32(RLP_NULL); + return (pathLength, BytesLib.slice(key, currentKeyIndex), isFinalNode); + } + + /** + * @notice Creates new nodes to support a k/v pair insertion into a given + * Merkle trie path. + * @param _path Path to the node nearest the k/v pair. + * @param _pathLength Length of the path. Necessary because the provided + * path may include additional nodes (e.g., it comes directly from a proof) + * and we can't resize in-memory arrays without costly duplication. + * @param _keyRemainder Portion of the initial key that must be inserted + * into the trie. + * @param _value Value to insert at the given key. + * @return A new path with the inserted k/v pair and extra supporting nodes. + */ + function getNewPath( + TrieNode[] memory _path, + uint256 _pathLength, + bytes memory _keyRemainder, + bytes memory _value + ) internal pure returns (TrieNode[] memory) { + bytes memory keyRemainder = _keyRemainder; + + // Most of our logic depends on the status of the last node in the path. + TrieNode memory lastNode = _path[_pathLength - 1]; + NodeType lastNodeType = getNodeType(lastNode); + + // Create an array for newly created nodes. + // We need up to three new nodes, depending on the contents of the last node. + // Since array resizing is expensive, we'll keep track of the size manually. 
+ // We're using an explicit `totalNewNodes += 1` after insertions for clarity. + TrieNode[] memory newNodes = new TrieNode[](3); + uint256 totalNewNodes = 0; + + if (keyRemainder.length == 0 && lastNodeType == NodeType.LeafNode) { + // We've found a leaf node with the given key. + // Simply need to update the value of the node to match. + newNodes[totalNewNodes] = makeLeafNode(getNodeKey(lastNode), _value); + totalNewNodes += 1; + } else if (lastNodeType == NodeType.BranchNode) { + if (keyRemainder.length == 0) { + // We've found a branch node with the given key. + // Simply need to update the value of the node to match. + newNodes[totalNewNodes] = editBranchValue(lastNode, _value); + totalNewNodes += 1; + } else { + // We've found a branch node, but it doesn't contain our key. + // Reinsert the old branch for now. + newNodes[totalNewNodes] = lastNode; + totalNewNodes += 1; + // Create a new leaf node, slicing our remainder since the first byte points + // to our branch node. + newNodes[totalNewNodes] = makeLeafNode(BytesLib.slice(keyRemainder, 1), _value); + totalNewNodes += 1; + } + } else { + // Our last node is either an extension node or a leaf node with a different key. + bytes memory lastNodeKey = getNodeKey(lastNode); + uint256 sharedNibbleLength = getSharedNibbleLength(lastNodeKey, keyRemainder); + + if (sharedNibbleLength != 0) { + // We've got some shared nibbles between the last node and our key remainder. + // We'll need to insert an extension node that covers these shared nibbles. + bytes memory nextNodeKey = BytesLib.slice(lastNodeKey, 0, sharedNibbleLength); + newNodes[totalNewNodes] = makeExtensionNode(nextNodeKey, getNodeHash(_value)); + totalNewNodes += 1; + + // Cut down the keys since we've just covered these shared nibbles. + lastNodeKey = BytesLib.slice(lastNodeKey, sharedNibbleLength); + keyRemainder = BytesLib.slice(keyRemainder, sharedNibbleLength); + } + + // Create an empty branch to fill in. 
+ TrieNode memory newBranch = makeEmptyBranchNode(); + + if (lastNodeKey.length == 0) { + // Key remainder was larger than the key for our last node. + // The value within our last node is therefore going to be shifted into + // a branch value slot. + newBranch = editBranchValue(newBranch, getNodeValue(lastNode)); + } else { + // Last node key was larger than the key remainder. + // We're going to modify some index of our branch. + uint8 branchKey = uint8(lastNodeKey[0]); + // Move on to the next nibble. + lastNodeKey = BytesLib.slice(lastNodeKey, 1); + + if (lastNodeType == NodeType.LeafNode) { + // We're dealing with a leaf node. + // We'll modify the key and insert the old leaf node into the branch index. + TrieNode memory modifiedLastNode = makeLeafNode(lastNodeKey, getNodeValue(lastNode)); + newBranch = editBranchIndex(newBranch, branchKey, getNodeHash(modifiedLastNode.encoded)); + } else if (lastNodeKey.length != 0) { + // We're dealing with a shrinking extension node. + // We need to modify the node to decrease the size of the key. + TrieNode memory modifiedLastNode = makeExtensionNode(lastNodeKey, getNodeValue(lastNode)); + newBranch = editBranchIndex(newBranch, branchKey, getNodeHash(modifiedLastNode.encoded)); + } else { + // We're dealing with an unnecessary extension node. + // We're going to delete the node entirely. + // Simply insert its current value into the branch index. + newBranch = editBranchIndex(newBranch, branchKey, getNodeValue(lastNode)); + } + } + + if (keyRemainder.length == 0) { + // We've got nothing left in the key remainder. + // Simply insert the value into the branch value slot. + newBranch = editBranchValue(newBranch, _value); + // Push the branch into the list of new nodes. + newNodes[totalNewNodes] = newBranch; + totalNewNodes += 1; + } else { + // We've got some key remainder to work with. + // We'll be inserting a leaf node into the trie. + // First, move on to the next nibble. 
+ keyRemainder = BytesLib.slice(keyRemainder, 1); + // Push the branch into the list of new nodes. + newNodes[totalNewNodes] = newBranch; + totalNewNodes += 1; + // Push a new leaf node for our k/v pair. + newNodes[totalNewNodes] = makeLeafNode(keyRemainder, _value); + totalNewNodes += 1; + } + } + + // Finally, join the old path with our newly created nodes. + // Since we're overwriting the last node in the path, we use `_pathLength - 1`. + return joinNodeArrays(_path, _pathLength - 1, newNodes, totalNewNodes); + } + + /** + * @notice Computes the trie root from a given path. + * @param _nodes Path to some k/v pair. + * @param _key Key for the k/v pair. + * @return Root hash for the updated trie. + */ + function getUpdatedTrieRoot( + TrieNode[] memory _nodes, + bytes memory _key + ) internal pure returns (bytes32) { + bytes memory key = BytesLib.toNibbles(_key); + + // Some variables to keep track of during iteration. + TrieNode memory currentNode; + NodeType currentNodeType; + bytes memory previousNodeHash; + + // Run through the path backwards to rebuild our root hash. + for (uint256 i = _nodes.length; i > 0; i--) { + // Pick out the current node. + currentNode = _nodes[i - 1]; + currentNodeType = getNodeType(currentNode); + + if (currentNodeType == NodeType.LeafNode) { + // Leaf nodes are already correctly encoded. + // Shift the key over to account for the nodes key. + bytes memory nodeKey = getNodeKey(currentNode); + key = BytesLib.slice(key, 0, key.length - nodeKey.length); + } else if (currentNodeType == NodeType.ExtensionNode) { + // Shift the key over to account for the nodes key. + bytes memory nodeKey = getNodeKey(currentNode); + key = BytesLib.slice(key, 0, key.length - nodeKey.length); + + // If this node is the last element in the path, it'll be correctly encoded + // and we can skip this part. + if (previousNodeHash.length > 0) { + // Re-encode the node based on the previous node. 
+ currentNode = makeExtensionNode(nodeKey, previousNodeHash); + } + } else if (currentNodeType == NodeType.BranchNode) { + // If this node is the last element in the path, it'll be correctly encoded + // and we can skip this part. + if (previousNodeHash.length > 0) { + // Re-encode the node based on the previous node. + uint8 branchKey = uint8(key[key.length - 1]); + key = BytesLib.slice(key, 0, key.length - 1); + currentNode = editBranchIndex(currentNode, branchKey, previousNodeHash); + } + } + + // Compute the node hash for the next iteration. + previousNodeHash = getNodeHash(currentNode.encoded); + } + + // Current node should be the root at this point. + // Simply return the hash of its encoding. + return keccak256(currentNode.encoded); + } + + /** + * @notice Parses an RLP-encoded proof into something more useful. + * @param _proof RLP-encoded proof to parse. + * @return Proof parsed into easily accessible structs. + */ + function parseProof( + bytes memory _proof + ) internal pure returns (TrieNode[] memory) { + RLPReader.RLPItem[] memory nodes = RLPReader.toList(RLPReader.toRlpItem(_proof)); + TrieNode[] memory proof = new TrieNode[](nodes.length); + + for (uint256 i = 0; i < nodes.length; i++) { + bytes memory encoded = RLPReader.toBytes(nodes[i]); + proof[i] = TrieNode({ + encoded: encoded, + decoded: RLPReader.toList(RLPReader.toRlpItem(encoded)) + }); + } + + return proof; + } + + /** + * @notice Picks out the ID for a node. Node ID is referred to as the + * "hash" within the specification, but nodes < 32 bytes are not actually + * hashed. + * @param _node Node to pull an ID for. + * @return ID for the node, depending on the size of its contents. + */ + function getNodeID( + RLPReader.RLPItem memory _node + ) internal pure returns (bytes32) { + bytes memory nodeID; + + if (_node.len < 32) { + // Nodes smaller than 32 bytes are RLP encoded. + nodeID = RLPReader.toRlpBytes(_node); + } else { + // Nodes 32 bytes or larger are hashed. 
+ nodeID = RLPReader.toBytes(_node); + } + + return BytesLib.toBytes32(nodeID); + } + + /** + * @notice Gets the path for a leaf or extension node. + * @param _node Node to get a path for. + * @return Node path, converted to an array of nibbles. + */ + function getNodePath( + TrieNode memory _node + ) internal pure returns (bytes memory) { + return BytesLib.toNibbles(RLPReader.toBytes(_node.decoded[0])); + } + + /** + * @notice Gets the key for a leaf or extension node. Keys are essentially + * just paths without any prefix. + * @param _node Node to get a key for. + * @return Node key, converted to an array of nibbles. + */ + function getNodeKey( + TrieNode memory _node + ) internal pure returns (bytes memory) { + return removeHexPrefix(getNodePath(_node)); + } + + /** + * @notice Gets the path for a node. + * @param _node Node to get a value for. + * @return Node value, as hex bytes. + */ + function getNodeValue( + TrieNode memory _node + ) internal pure returns (bytes memory) { + return RLPReader.toBytes(_node.decoded[_node.decoded.length - 1]); + } + + /** + * @notice Computes the node hash for an encoded node. Nodes < 32 bytes + * are not hashed, all others are keccak256 hashed. + * @param _encoded Encoded node to hash. + * @return Hash of the encoded node. Simply the input if < 32 bytes. + */ + function getNodeHash( + bytes memory _encoded + ) internal pure returns (bytes memory) { + if (_encoded.length < 32) { + return _encoded; + } else { + return abi.encodePacked(keccak256(_encoded)); + } + } + + /** + * @notice Determines the type for a given node. + * @param _node Node to determine a type for. + * @return Type of the node; BranchNode/ExtensionNode/LeafNode. 
+ */ + function getNodeType( + TrieNode memory _node + ) internal pure returns (NodeType) { + if (_node.decoded.length == BRANCH_NODE_LENGTH) { + return NodeType.BranchNode; + } else if (_node.decoded.length == LEAF_OR_EXTENSION_NODE_LENGTH) { + bytes memory path = getNodePath(_node); + uint8 prefix = uint8(path[0]); + + if (prefix == PREFIX_LEAF_EVEN || prefix == PREFIX_LEAF_ODD) { + return NodeType.LeafNode; + } else if (prefix == PREFIX_EXTENSION_EVEN || prefix == PREFIX_EXTENSION_ODD) { + return NodeType.ExtensionNode; + } + } + + revert("Invalid node type"); + } + + /** + * @notice Utility; determines the number of nibbles shared between two + * nibble arrays. + * @param _a First nibble array. + * @param _b Second nibble array. + * @return Number of shared nibbles. + */ + function getSharedNibbleLength(bytes memory _a, bytes memory _b) internal pure returns (uint256) { + uint256 i = 0; + while (_a.length > i && _b.length > i && _a[i] == _b[i]) { + i++; + } + return i; + } + + /** + * @notice Utility; converts an RLP-encoded node into our nice struct. + * @param _raw RLP-encoded node to convert. + * @return Node as a TrieNode struct. + */ + function makeNode( + bytes[] memory _raw + ) internal pure returns (TrieNode memory) { + bytes memory encoded = RLPWriter.encodeList(_raw); + + return TrieNode({ + encoded: encoded, + decoded: RLPReader.toList(RLPReader.toRlpItem(encoded)) + }); + } + + /** + * @notice Utility; converts an RLP-decoded node into our nice struct. + * @param _items RLP-decoded node to convert. + * @return Node as a TrieNode struct. + */ + function makeNode( + RLPReader.RLPItem[] memory _items + ) internal pure returns (TrieNode memory) { + bytes[] memory raw = new bytes[](_items.length); + for (uint256 i = 0; i < _items.length; i++) { + raw[i] = RLPReader.toRlpBytes(_items[i]); + } + return makeNode(raw); + } + + + + /** + * @notice Creates a new extension node. + * @param _key Key for the extension node, unprefixed. 
+ * @param _value Value for the extension node. + * @return New extension node with the given k/v pair. + */ + function makeExtensionNode( + bytes memory _key, + bytes memory _value + ) internal pure returns (TrieNode memory) { + bytes[] memory raw = new bytes[](2); + bytes memory key = addHexPrefix(_key, false); + raw[0] = RLPWriter.encodeBytes(BytesLib.fromNibbles(key)); + raw[1] = RLPWriter.encodeBytes(_value); + return makeNode(raw); + } + + /** + * @notice Creates a new leaf node. + * @dev This function is essentially identical to `makeExtensionNode`. + * Although we could route both to a single method with a flag, it's + * more gas efficient to keep them separate and duplicate the logic. + * @param _key Key for the leaf node, unprefixed. + * @param _value Value for the leaf node. + * @return New leaf node with the given k/v pair. + */ + function makeLeafNode( + bytes memory _key, + bytes memory _value + ) internal pure returns (TrieNode memory) { + bytes[] memory raw = new bytes[](2); + bytes memory key = addHexPrefix(_key, true); + raw[0] = RLPWriter.encodeBytes(BytesLib.fromNibbles(key)); + raw[1] = RLPWriter.encodeBytes(_value); + return makeNode(raw); + } + + /** + * @notice Creates an empty branch node. + * @return Empty branch node as a TrieNode stuct. + */ + function makeEmptyBranchNode() internal pure returns (TrieNode memory) { + bytes[] memory raw = new bytes[](BRANCH_NODE_LENGTH); + for (uint256 i = 0; i < raw.length; i++) { + raw[i] = hex'80'; + } + return makeNode(raw); + } + + /** + * @notice Modifies the value slot for a given branch. + * @param _branch Branch node to modify. + * @param _value Value to insert into the branch. + * @return Modified branch node. 
+ */ + function editBranchValue( + TrieNode memory _branch, + bytes memory _value + ) internal pure returns (TrieNode memory) { + bytes memory encoded = RLPWriter.encodeBytes(_value); + _branch.decoded[_branch.decoded.length - 1] = RLPReader.toRlpItem(encoded); + return makeNode(_branch.decoded); + } + + /** + * @notice Modifies a slot at an index for a given branch. + * @param _branch Branch node to modify. + * @param _index Slot index to modify. + * @param _value Value to insert into the slot. + * @return Modified branch node. + */ + function editBranchIndex( + TrieNode memory _branch, + uint8 _index, + bytes memory _value + ) internal pure returns (TrieNode memory) { + bytes memory encoded = _value.length < 32 ? _value : RLPWriter.encodeBytes(_value); + _branch.decoded[_index] = RLPReader.toRlpItem(encoded); + return makeNode(_branch.decoded); + } + + /** + * @notice Utility; adds a prefix to a key. + * @param _key Key to prefix. + * @param _isLeaf Whether or not the key belongs to a leaf. + * @return Prefixed key. + */ + function addHexPrefix( + bytes memory _key, + bool _isLeaf + ) internal pure returns (bytes memory) { + uint8 prefix = _isLeaf ? uint8(0x02) : uint8(0x00); + uint8 offset = uint8(_key.length % 2); + bytes memory prefixed = new bytes(2 - offset); + prefixed[0] = bytes1(prefix + offset); + return BytesLib.concat(prefixed, _key); + } + + /** + * @notice Utility; removes a prefix from a path. + * @param _path Path to remove the prefix from. + * @return Unprefixed key. + */ + function removeHexPrefix( + bytes memory _path + ) internal pure returns (bytes memory) { + if (uint8(_path[0]) % 2 == 0) { + return BytesLib.slice(_path, 2); + } else { + return BytesLib.slice(_path, 1); + } + } + + /** + * @notice Utility; combines two node arrays. Array lengths are required + * because the actual lengths may be longer than the filled lengths. + * Array resizing is extremely costly and should be avoided. + * @param _a First array to join. 
+ * @param _aLength Length of the first array. + * @param _b Second array to join. + * @param _bLength Length of the second array. + * @return Combined node array. + */ + function joinNodeArrays( + TrieNode[] memory _a, + uint256 _aLength, + TrieNode[] memory _b, + uint256 _bLength + ) internal pure returns (TrieNode[] memory) { + TrieNode[] memory ret = new TrieNode[](_aLength + _bLength); + + // Copy elements from the first array. + for (uint256 i = 0; i < _aLength; i++) { + ret[i] = _a[i]; + } + + // Copy elements from the second array. + for (uint256 i = 0; i < _bLength; i++) { + ret[i + _aLength] = _b[i]; + } + + return ret; + } +} \ No newline at end of file diff --git a/packages/rollup-contracts/contracts/utils/RLPReader.sol b/packages/rollup-contracts/contracts/utils/RLPReader.sol new file mode 100644 index 0000000000000..cdc078122e005 --- /dev/null +++ b/packages/rollup-contracts/contracts/utils/RLPReader.sol @@ -0,0 +1,265 @@ +/* +* @author Hamdi Allam hamdi.allam97@gmail.com +* Please reach out with any questions or concerns +*/ +pragma solidity ^0.5.0; + +library RLPReader { + uint8 constant STRING_SHORT_START = 0x80; + uint8 constant STRING_LONG_START = 0xb8; + uint8 constant LIST_SHORT_START = 0xc0; + uint8 constant LIST_LONG_START = 0xf8; + + uint8 constant WORD_SIZE = 32; + + struct RLPItem { + uint len; + uint memPtr; + } + + /* + * @param item RLP encoded bytes + */ + function toRlpItem(bytes memory item) internal pure returns (RLPItem memory) { + uint memPtr; + assembly { + memPtr := add(item, 0x20) + } + + return RLPItem(item.length, memPtr); + } + + /* + * @param item RLP encoded bytes + */ + function rlpLen(RLPItem memory item) internal pure returns (uint) { + return item.len; + } + + /* + * @param item RLP encoded bytes + */ + function payloadLen(RLPItem memory item) internal pure returns (uint) { + return item.len - _payloadOffset(item.memPtr); + } + + /* + * @param item RLP encoded list in bytes + */ + function toList(RLPItem memory item) 
internal pure returns (RLPItem[] memory result) { + require(isList(item)); + + uint items = numItems(item); + result = new RLPItem[](items); + + uint memPtr = item.memPtr + _payloadOffset(item.memPtr); + uint dataLen; + for (uint i = 0; i < items; i++) { + dataLen = _itemLength(memPtr); + result[i] = RLPItem(dataLen, memPtr); + memPtr = memPtr + dataLen; + } + } + + // @return indicator whether encoded payload is a list. negate this function call for isData. + function isList(RLPItem memory item) internal pure returns (bool) { + if (item.len == 0) return false; + + uint8 byte0; + uint memPtr = item.memPtr; + assembly { + byte0 := byte(0, mload(memPtr)) + } + + if (byte0 < LIST_SHORT_START) + return false; + return true; + } + + /** RLPItem conversions into data types **/ + + // @returns raw rlp encoding in bytes + function toRlpBytes(RLPItem memory item) internal pure returns (bytes memory) { + bytes memory result = new bytes(item.len); + if (result.length == 0) return result; + + uint ptr; + assembly { + ptr := add(0x20, result) + } + + copy(item.memPtr, ptr, item.len); + return result; + } + + // any non-zero byte is considered true + function toBoolean(RLPItem memory item) internal pure returns (bool) { + require(item.len == 1); + uint result; + uint memPtr = item.memPtr; + assembly { + result := byte(0, mload(memPtr)) + } + + return result == 0 ? 
false : true; + } + + function toAddress(RLPItem memory item) internal pure returns (address) { + // 1 byte for the length prefix + require(item.len == 21); + + return address(toUint(item)); + } + + function toUint(RLPItem memory item) internal pure returns (uint) { + require(item.len > 0 && item.len <= 33); + + uint offset = _payloadOffset(item.memPtr); + uint len = item.len - offset; + + uint result; + uint memPtr = item.memPtr + offset; + assembly { + result := mload(memPtr) + + // shift to the correct location if necessary + if lt(len, 32) { + result := div(result, exp(256, sub(32, len))) + } + } + + return result; + } + + // enforces 32 byte length + function toUintStrict(RLPItem memory item) internal pure returns (uint) { + // one byte prefix + require(item.len == 33); + + uint result; + uint memPtr = item.memPtr + 1; + assembly { + result := mload(memPtr) + } + + return result; + } + + function toBytes(RLPItem memory item) internal pure returns (bytes memory) { + require(item.len > 0); + + uint offset = _payloadOffset(item.memPtr); + uint len = item.len - offset; // data length + bytes memory result = new bytes(len); + + uint destPtr; + assembly { + destPtr := add(0x20, result) + } + + copy(item.memPtr + offset, destPtr, len); + return result; + } + + /* + * Private Helpers + */ + + // @return number of payload items inside an encoded list. 
+ function numItems(RLPItem memory item) private pure returns (uint) { + if (item.len == 0) return 0; + + uint count = 0; + uint currPtr = item.memPtr + _payloadOffset(item.memPtr); + uint endPtr = item.memPtr + item.len; + while (currPtr < endPtr) { + currPtr = currPtr + _itemLength(currPtr); // skip over an item + count++; + } + + return count; + } + + // @return entire rlp item byte length + function _itemLength(uint memPtr) private pure returns (uint len) { + uint byte0; + assembly { + byte0 := byte(0, mload(memPtr)) + } + + if (byte0 < STRING_SHORT_START) + return 1; + + else if (byte0 < STRING_LONG_START) + return byte0 - STRING_SHORT_START + 1; + + else if (byte0 < LIST_SHORT_START) { + assembly { + let byteLen := sub(byte0, 0xb7) // # of bytes the actual length is + memPtr := add(memPtr, 1) // skip over the first byte + + /* 32 byte word size */ + let dataLen := div(mload(memPtr), exp(256, sub(32, byteLen))) // right shifting to get the len + len := add(dataLen, add(byteLen, 1)) + } + } + + else if (byte0 < LIST_LONG_START) { + return byte0 - LIST_SHORT_START + 1; + } + + else { + assembly { + let byteLen := sub(byte0, 0xf7) + memPtr := add(memPtr, 1) + + let dataLen := div(mload(memPtr), exp(256, sub(32, byteLen))) // right shifting to the correct length + len := add(dataLen, add(byteLen, 1)) + } + } + } + + // @return number of bytes until the data + function _payloadOffset(uint memPtr) private pure returns (uint) { + uint byte0; + assembly { + byte0 := byte(0, mload(memPtr)) + } + + if (byte0 < STRING_SHORT_START) + return 0; + else if (byte0 < STRING_LONG_START || (byte0 >= LIST_SHORT_START && byte0 < LIST_LONG_START)) + return 1; + else if (byte0 < LIST_SHORT_START) // being explicit + return byte0 - (STRING_LONG_START - 1) + 1; + else + return byte0 - (LIST_LONG_START - 1) + 1; + } + + /* + * @param src Pointer to source + * @param dest Pointer to destination + * @param len Amount of memory to copy from the source + */ + function copy(uint src, uint 
dest, uint len) private pure { + if (len == 0) return; + + // copy as many word sizes as possible + for (; len >= WORD_SIZE; len -= WORD_SIZE) { + assembly { + mstore(dest, mload(src)) + } + + src += WORD_SIZE; + dest += WORD_SIZE; + } + + // left over bytes. Mask is used to remove unwanted bytes from the word + uint mask = 256 ** (WORD_SIZE - len) - 1; + assembly { + let srcpart := and(mload(src), not(mask)) // zero out src + let destpart := and(mload(dest), mask) // retrieve the bytes + mstore(dest, or(destpart, srcpart)) + } + } +} diff --git a/packages/rollup-contracts/contracts/utils/RLPWriter.sol b/packages/rollup-contracts/contracts/utils/RLPWriter.sol new file mode 100644 index 0000000000000..95346ed8d777a --- /dev/null +++ b/packages/rollup-contracts/contracts/utils/RLPWriter.sol @@ -0,0 +1,264 @@ +pragma solidity ^0.5.0; +pragma experimental ABIEncoderV2; + +/** + * Source: https://github.com/omisego/plasma-mvp/blob/master/plasma/root_chain/contracts/RLPEncode.sol + * @title RLPEncode + * @dev A simple RLP encoding library. + * @author Bakaoh + */ +library RLPWriter { + /* + * Public functions + */ + + /** + * @dev RLP encodes a byte string. + * @param self The byte string to encode. + * @return The RLP encoded string in bytes. + */ + function encodeBytes(bytes memory self) internal pure returns (bytes memory) { + bytes memory encoded; + if (self.length == 1 && uint8(self[0]) < 128) { + encoded = self; + } else { + encoded = concat(encodeLength(self.length, 128), self); + } + return encoded; + } + + /** + * @dev RLP encodes a list of RLP encoded byte strings. + * @param self The list of RLP encoded byte strings. + * @return The RLP encoded list of items in bytes. + */ + function encodeList(bytes[] memory self) internal pure returns (bytes memory) { + bytes memory list = flatten(self); + return concat(encodeLength(list.length, 192), list); + } + + /** + * @dev RLP encodes a string. + * @param self The string to encode. 
+ * @return The RLP encoded string in bytes. + */ + function encodeString(string memory self) internal pure returns (bytes memory) { + return encodeBytes(bytes(self)); + } + + /** + * @dev RLP encodes an address. + * @param self The address to encode. + * @return The RLP encoded address in bytes. + */ + function encodeAddress(address self) internal pure returns (bytes memory) { + bytes memory inputBytes; + assembly { + let m := mload(0x40) + mstore(add(m, 20), xor(0x140000000000000000000000000000000000000000, self)) + mstore(0x40, add(m, 52)) + inputBytes := m + } + return encodeBytes(inputBytes); + } + + /** + * @dev RLP encodes a uint. + * @param self The uint to encode. + * @return The RLP encoded uint in bytes. + */ + function encodeUint(uint self) internal pure returns (bytes memory) { + return encodeBytes(toBinary(self)); + } + + /** + * @dev RLP encodes an int. + * @param self The int to encode. + * @return The RLP encoded int in bytes. + */ + function encodeInt(int self) internal pure returns (bytes memory) { + return encodeUint(uint(self)); + } + + /** + * @dev RLP encodes a bool. + * @param self The bool to encode. + * @return The RLP encoded bool in bytes. + */ + function encodeBool(bool self) internal pure returns (bytes memory) { + bytes memory encoded = new bytes(1); + encoded[0] = (self ? bytes1(0x01) : bytes1(0x80)); + return encoded; + } + + + /* + * Private functions + */ + + /** + * @dev Encode the first byte, followed by the `len` in binary form if `length` is more than 55. + * @param len The length of the string or the payload. + * @param offset 128 if item is string, 192 if item is list. + * @return RLP encoded bytes. 
+ */ + function encodeLength(uint len, uint offset) private pure returns (bytes memory) { + bytes memory encoded; + if (len < 56) { + encoded = new bytes(1); + encoded[0] = byte(uint8(len) + uint8(offset)); + } else { + uint lenLen; + uint i = 1; + while (len / i != 0) { + lenLen++; + i *= 256; + } + + encoded = new bytes(lenLen + 1); + encoded[0] = byte(uint8(lenLen) + uint8(offset) + 55); + for(i = 1; i <= lenLen; i++) { + encoded[i] = byte(uint8((len / (256**(lenLen-i))) % 256)); + } + } + return encoded; + } + + /** + * @dev Encode integer in big endian binary form with no leading zeroes. + * @notice TODO: This should be optimized with assembly to save gas costs. + * @param _x The integer to encode. + * @return RLP encoded bytes. + */ + function toBinary(uint _x) private pure returns (bytes memory) { + bytes memory b = new bytes(32); + assembly { + mstore(add(b, 32), _x) + } + uint i = 0; + for (; i < 32; i++) { + if (b[i] != 0) { + break; + } + } + bytes memory res = new bytes(32 - i); + for (uint j = 0; j < res.length; j++) { + res[j] = b[i++]; + } + return res; + } + + /** + * @dev Copies a piece of memory to another location. + * @notice From: https://github.com/Arachnid/solidity-stringutils/blob/master/src/strings.sol. + * @param _dest Destination location. + * @param _src Source location. + * @param _len Length of memory to copy. + */ + function memcpy(uint _dest, uint _src, uint _len) private pure { + uint dest = _dest; + uint src = _src; + uint len = _len; + + for(; len >= 32; len -= 32) { + assembly { + mstore(dest, mload(src)) + } + dest += 32; + src += 32; + } + + uint mask = 256 ** (32 - len) - 1; + assembly { + let srcpart := and(mload(src), not(mask)) + let destpart := and(mload(dest), mask) + mstore(dest, or(destpart, srcpart)) + } + } + + /** + * @dev Flattens a list of byte strings into one byte string. + * @notice From: https://github.com/sammayo/solidity-rlp-encoder/blob/master/RLPEncode.sol. + * @param _list List of byte strings to flatten. 
+ * @return The flattened byte string. + */ + function flatten(bytes[] memory _list) private pure returns (bytes memory) { + if (_list.length == 0) { + return new bytes(0); + } + + uint len; + uint i = 0; + for (; i < _list.length; i++) { + len += _list[i].length; + } + + bytes memory flattened = new bytes(len); + uint flattenedPtr; + assembly { flattenedPtr := add(flattened, 0x20) } + + for(i = 0; i < _list.length; i++) { + bytes memory item = _list[i]; + + uint listPtr; + assembly { listPtr := add(item, 0x20)} + + memcpy(flattenedPtr, listPtr, item.length); + flattenedPtr += _list[i].length; + } + + return flattened; + } + + /** + * @dev Concatenates two bytes. + * @notice From: https://github.com/GNSPS/solidity-bytes-utils/blob/master/contracts/BytesLib.sol. + * @param _preBytes First byte string. + * @param _postBytes Second byte string. + * @return Both byte string combined. + */ + function concat(bytes memory _preBytes, bytes memory _postBytes) private pure returns (bytes memory) { + bytes memory tempBytes; + + assembly { + tempBytes := mload(0x40) + + let length := mload(_preBytes) + mstore(tempBytes, length) + + let mc := add(tempBytes, 0x20) + let end := add(mc, length) + + for { + let cc := add(_preBytes, 0x20) + } lt(mc, end) { + mc := add(mc, 0x20) + cc := add(cc, 0x20) + } { + mstore(mc, mload(cc)) + } + + length := mload(_postBytes) + mstore(tempBytes, add(length, mload(tempBytes))) + + mc := end + end := add(mc, length) + + for { + let cc := add(_postBytes, 0x20) + } lt(mc, end) { + mc := add(mc, 0x20) + cc := add(cc, 0x20) + } { + mstore(mc, mload(cc)) + } + + mstore(0x40, and( + add(add(end, iszero(add(length, mload(_preBytes)))), 31), + not(31) + )) + } + + return tempBytes; + } +} diff --git a/packages/rollup-contracts/package.json b/packages/rollup-contracts/package.json index c2fd7ee707388..8b41e3faf5a62 100644 --- a/packages/rollup-contracts/package.json +++ b/packages/rollup-contracts/package.json @@ -10,7 +10,7 @@ "all": "yarn clean && yarn 
build && yarn test && yarn fix && yarn lint", "test": "waffle waffle-config.json && mocha --require ts-node/register 'test/*/*.spec.ts' --timeout 20000", "lint": "tslint --format stylish --project .", - "fix": "prettier --config ../../prettier-config.json --write 'index.ts' '{deploy,test}/**/*.ts'", + "fix": "prettier --config ../../prettier-config.json --write \"index.ts\" \"{deploy,test}/**/*.ts\"", "build": "waffle waffle-config.json && tsc -p .", "clean": "rimraf build/", "deploy:rollup-chain": "yarn build && node ./build/deploy/rollup-chain.js" @@ -48,10 +48,15 @@ "@eth-optimism/core-utils": "^0.0.1-alpha.25", "@types/sinon-chai": "^3.2.2", "chai": "^4.2.0", + "chai-as-promised": "^7.1.1", "ethereum-waffle": "2.1.0", "ethers": "^4.0.37", + "merkle-patricia-tree": "git+https://github.com/kfichter/merkle-patricia-tree", "merkletreejs": "^0.1.7", - "openzeppelin-solidity": "^2.2.0" + "openzeppelin-solidity": "^2.2.0", + "random-bytes-seed": "^1.0.3", + "rlp": "^2.2.5", + "seedrandom": "^3.0.5" }, "gitHead": "ccce366645fca6bad46c5cf7f7ff2f407c6ba5fd" } diff --git a/packages/rollup-contracts/test/helpers/trie-helpers.ts b/packages/rollup-contracts/test/helpers/trie-helpers.ts new file mode 100644 index 0000000000000..4b78ad2bdf1af --- /dev/null +++ b/packages/rollup-contracts/test/helpers/trie-helpers.ts @@ -0,0 +1,200 @@ +import * as rlp from 'rlp' +import * as seedbytes from 'random-bytes-seed' +import * as seedfloat from 'seedrandom' +import { BaseTrie } from 'merkle-patricia-tree' + +interface UpdateTest { + proof: string + key: string + val: string + oldRoot: string + newRoot: string +} + +interface ProofTest { + proof: string + key: string + val: string + root: string +} + +interface TrieNode { + key: string + val: string +} + +/** + * Utility; converts a buffer or string into a '0x'-prefixed string. + * @param buf Element to convert. + * @returns Converted element. 
+ */ +const toHexString = (buf: Buffer | string): string => { + return '0x' + Buffer.from(buf).toString('hex') +} + +/** + * Utility; generates a random integer. + * @param seed Seed to the random number generator. + * @param min Minimum for the RNG. + * @param max Maximum for the RNG. + * @returns Random integer between minimum and maximum. + */ +const randomInt = (seed: string, min: number, max: number): number => { + const randomFloat = seedfloat(seed) + min = Math.ceil(min) + max = Math.floor(max) + return Math.floor(randomFloat() * (max - min + 1)) + min +} + +/** + * Utility; creates a trie object from a list of nodes. + * @param nodes Nodes to seed the trie with. + * @returns Trie corresponding to the given nodes. + */ +const makeTrie = async (nodes: TrieNode[]): Promise<BaseTrie> => { + const trie = new BaseTrie() + + for (const node of nodes) { + await trie.put(Buffer.from(node.key), Buffer.from(node.val)) + } + + return trie +} + +/** + * Utility; generates random nodes. + * @param germ Seed to the random number generator. + * @param count Number of nodes to generate. + * @param keySize Size of the key for each node in bytes. + * @param valSize Size of the value for each node in bytes. + * @returns List of randomly generated nodes. + */ +const makeRandomNodes = ( + germ: string, + count: number, + keySize: number = 32, + valSize: number = 32 +): TrieNode[] => { + const randomBytes = seedbytes(germ) + const nodes: TrieNode[] = Array(count) + .fill({}) + .map(() => { + return { + key: randomBytes(keySize).toString('hex'), + val: randomBytes(valSize).toString('hex'), + } + }) + return nodes +} + +/** + * Generates inclusion/exclusion proof test parameters. + * @param nodes Nodes of the trie, or the trie itself. + * @param key Key to prove inclusion/exclusion for. + * @param val Value to prove inclusion/exclusion for. + * @returns Proof test parameters. 
+ */ +export const makeProofTest = async ( + nodes: TrieNode[] | BaseTrie, + key: string, + val?: string +): Promise<ProofTest> => { + const trie = nodes instanceof BaseTrie ? nodes : await makeTrie(nodes) + + const proof = await BaseTrie.prove(trie, Buffer.from(key)) + const ret = val + ? Buffer.from(val) + : await BaseTrie.verifyProof(trie.root, Buffer.from(key), proof) + + return { + proof: toHexString(rlp.encode(proof)), + key: toHexString(key), + val: toHexString(ret), + root: toHexString(trie.root), + } +} + +/** + * Automatically generates all possible leaf node inclusion proof tests. + * @param nodes Nodes to generate tests for. + * @returns All leaf node tests for the given nodes. + */ +export const makeAllProofTests = async ( + nodes: TrieNode[] +): Promise<ProofTest[]> => { + const trie = await makeTrie(nodes) + const tests: ProofTest[] = [] + + for (const node of nodes) { + tests.push(await makeProofTest(trie, node.key)) + } + + return tests +} + +/** + * Generates a random inclusion proof test. + * @param germ Seed to the random number generator. + * @param count Number of nodes to create. + * @param keySize Key size in bytes. + * @param valSize Value size in bytes. + * @return Proof test parameters for the randomly generated nodes. + */ +export const makeRandomProofTest = async ( + germ: string, + count: number, + keySize: number = 32, + valSize: number = 32 +): Promise<ProofTest> => { + const nodes = makeRandomNodes(germ, count, keySize, valSize) + return makeProofTest(nodes, nodes[randomInt(germ, 0, count)].key) +} + +/** + * Generates update test parameters. + * @param nodes Nodes in the trie. + * @param key Key to update. + * @param val Value to update. + * @returns Update test parameters. 
+ */ +export const makeUpdateTest = async ( + nodes: TrieNode[], + key: string, + val: string +): Promise<UpdateTest> => { + const trie = await makeTrie(nodes) + + const proof = await BaseTrie.prove(trie, Buffer.from(key)) + const oldRoot = Buffer.from(trie.root) + + await trie.put(Buffer.from(key), Buffer.from(val)) + + return { + proof: toHexString(rlp.encode(proof)), + key: toHexString(key), + val: toHexString(val), + oldRoot: toHexString(oldRoot), + newRoot: toHexString(trie.root), + } +} + +/** + * Generates a random update test. + * @param germ Seed to the random number generator. + * @param count Number of nodes to create. + * @param keySize Key size in bytes. + * @param valSize Value size in bytes. + * @return Update test parameters for the randomly generated nodes. + */ +export const makeRandomUpdateTest = async ( + germ: string, + count: number, + keySize: number = 32, + valSize: number = 32 +): Promise<UpdateTest> => { + const nodes = makeRandomNodes(germ, count, keySize, valSize) + const randomBytes = seedbytes(germ) + const newKey = randomBytes(keySize).toString('hex') + const newVal = randomBytes(valSize).toString('hex') + return makeUpdateTest(nodes, newKey, newVal) +} diff --git a/packages/rollup-contracts/test/merklization/MerkleTrie.spec.ts b/packages/rollup-contracts/test/merklization/MerkleTrie.spec.ts new file mode 100644 index 0000000000000..64ba56a527cb1 --- /dev/null +++ b/packages/rollup-contracts/test/merklization/MerkleTrie.spec.ts @@ -0,0 +1,463 @@ +import { expect } from '../setup' + +import { createMockProvider, deployContract, getWallets } from 'ethereum-waffle' +import { Contract } from 'ethers' + +import * as MerkleTrie from '../../build/MerkleTrie.json' +import { + makeAllProofTests, + makeRandomProofTest, + makeProofTest, + makeUpdateTest, + makeRandomUpdateTest, +} from '../helpers/trie-helpers' + +describe('MerkleTrie', () => { + const [wallet] = getWallets(createMockProvider()) + let trie: Contract + beforeEach(async () => { + trie = await 
deployContract(wallet, MerkleTrie) + }) + + describe('update', async () => { + it(`should support basic leaf value updates`, async () => { + const test = await makeUpdateTest( + [ + { + key: 'key1aa', + val: '0123456789012345678901234567890123456789xx', + }, + { + key: 'key2bb', + val: 'aval2', + }, + { + key: 'key3cc', + val: 'aval3', + }, + ], + 'key1aa', + 'some new value' + ) + expect( + await trie.update(test.key, test.val, test.proof, test.oldRoot) + ).to.equal(test.newRoot) + }) + + it(`should support new leaf insertions`, async () => { + const test = await makeUpdateTest( + [ + { + key: 'key1aa', + val: '0123456789012345678901234567890123456789xx', + }, + { + key: 'key2bb', + val: 'aval2', + }, + { + key: 'key3cc', + val: 'aval3', + }, + ], + 'key4dd', + 'some new value' + ) + expect( + await trie.update(test.key, test.val, test.proof, test.oldRoot) + ).to.equal(test.newRoot) + }) + + it(`should support modifications to an extension node`, async () => { + const test = await makeUpdateTest( + [ + { + key: 'key1aa', + val: '0123456789012345678901234567890123456789xx', + }, + { + key: 'key2bb', + val: 'aval2', + }, + { + key: 'key3cc', + val: 'aval3', + }, + ], + 'key1ab', + 'some new value' + ) + expect( + await trie.update(test.key, test.val, test.proof, test.oldRoot) + ).to.equal(test.newRoot) + }) + + it(`should support modifications shifting an existing value into a branch`, async () => { + const test = await makeUpdateTest( + [ + { + key: 'key1aa', + val: '0123456789012345678901234567890123456789xx', + }, + { + key: 'key2bb', + val: 'aval2', + }, + { + key: 'key3cc', + val: 'aval3', + }, + ], + 'key1aaa', + 'some new value' + ) + expect( + await trie.update(test.key, test.val, test.proof, test.oldRoot) + ).to.equal(test.newRoot) + }) + + it(`should support modifications shifting the new value into a branch`, async () => { + const test = await makeUpdateTest( + [ + { + key: 'key1aa', + val: '0123456789012345678901234567890123456789xx', + }, + { + key: 
'key2bb', + val: 'aval2', + }, + { + key: 'key3cc', + val: 'aval3', + }, + ], + 'key1a', + 'some new value' + ) + expect( + await trie.update(test.key, test.val, test.proof, test.oldRoot) + ).to.equal(test.newRoot) + }) + + it(`should support random updates (128 nodes)`, async () => { + const test = await makeRandomUpdateTest('seed.update.128', 128) + expect( + await trie.update(test.key, test.val, test.proof, test.oldRoot) + ).to.equal(test.newRoot) + }) + + it(`should support random updates (256 nodes)`, async () => { + const test = await makeRandomUpdateTest('seed.update.256', 256) + expect( + await trie.update(test.key, test.val, test.proof, test.oldRoot) + ).to.equal(test.newRoot) + }) + + it(`should support random updates (512 nodes)`, async () => { + const test = await makeRandomUpdateTest('seed.update.512', 512) + expect( + await trie.update(test.key, test.val, test.proof, test.oldRoot) + ).to.equal(test.newRoot) + }) + + it(`should support random updates (1024 nodes)`, async () => { + const test = await makeRandomUpdateTest('seed.update.1024', 1024) + expect( + await trie.update(test.key, test.val, test.proof, test.oldRoot) + ).to.equal(test.newRoot) + }) + + it(`should support random updates (2048 nodes)`, async () => { + const test = await makeRandomUpdateTest('seed.update.2048', 2048) + expect( + await trie.update(test.key, test.val, test.proof, test.oldRoot) + ).to.equal(test.newRoot) + }) + }) + + describe('verifyInclusionProof', async () => { + it(`should verify basic proofs`, async () => { + const tests = await makeAllProofTests([ + { + key: 'key1aa', + val: '0123456789012345678901234567890123456789xx', + }, + { + key: 'key2bb', + val: 'aval2', + }, + { + key: 'key3cc', + val: 'aval3', + }, + ]) + for (const test of tests) { + expect( + await trie.verifyInclusionProof( + test.key, + test.val, + test.proof, + test.root + ) + ).to.equal(true) + } + }) + + it(`should verify a single long key`, async () => { + const tests = await makeAllProofTests([ + { 
+ key: 'key1aa', + val: '0123456789012345678901234567890123456789xx', + }, + ]) + for (const test of tests) { + expect( + await trie.verifyInclusionProof( + test.key, + test.val, + test.proof, + test.root + ) + ).to.equal(true) + } + }) + + it(`should verify a single short key`, async () => { + const tests = await makeAllProofTests([ + { + key: 'key1aa', + val: '01234', + }, + ]) + for (const test of tests) { + expect( + await trie.verifyInclusionProof( + test.key, + test.val, + test.proof, + test.root + ) + ).to.equal(true) + } + }) + + it(`should verify a key in the middle`, async () => { + const tests = await makeAllProofTests([ + { + key: 'key1aa', + val: '0123456789012345678901234567890123456789xxx', + }, + { + key: 'key1', + val: '0123456789012345678901234567890123456789Very_Long', + }, + { + key: 'key2bb', + val: 'aval3', + }, + { + key: 'key2', + val: 'short', + }, + { + key: 'key3cc', + val: 'aval3', + }, + { + key: 'key3', + val: '1234567890123456789012345678901', + }, + ]) + for (const test of tests) { + expect( + await trie.verifyInclusionProof( + test.key, + test.val, + test.proof, + test.root + ) + ).to.equal(true) + } + }) + + it(`should verify with embedded extension nodes`, async () => { + const tests = await makeAllProofTests([ + { + key: 'a', + val: 'a', + }, + { + key: 'b', + val: 'b', + }, + { + key: 'c', + val: 'c', + }, + ]) + for (const test of tests) { + expect( + await trie.verifyInclusionProof( + test.key, + test.val, + test.proof, + test.root + ) + ).to.equal(true) + } + }) + + it('should verify random data (128 nodes)', async () => { + const test = await makeRandomProofTest('seed.inclusion.128', 128) + expect( + await trie.verifyInclusionProof( + test.key, + test.val, + test.proof, + test.root + ) + ).to.equal(true) + }) + + it('should verify random data (256 nodes)', async () => { + const test = await makeRandomProofTest('seed.inclusion.256', 256) + expect( + await trie.verifyInclusionProof( + test.key, + test.val, + test.proof, + 
test.root + ) + ).to.equal(true) + }) + + it('should verify random data (512 nodes)', async () => { + const test = await makeRandomProofTest('seed.inclusion.512', 512) + expect( + await trie.verifyInclusionProof( + test.key, + test.val, + test.proof, + test.root + ) + ).to.equal(true) + }) + + it('should verify random data (1024 nodes)', async () => { + const test = await makeRandomProofTest('seed.inclusion.1024', 1024) + expect( + await trie.verifyInclusionProof( + test.key, + test.val, + test.proof, + test.root + ) + ).to.equal(true) + }) + + it('should verify random data (2048 nodes)', async () => { + const test = await makeRandomProofTest('seed.inclusion.2048', 2048) + expect( + await trie.verifyInclusionProof( + test.key, + test.val, + test.proof, + test.root + ) + ).to.equal(true) + }) + }) + + describe('verifyExclusionProof', () => { + it('should verify exclusion with an existing key and differing value', async () => { + const test = await makeProofTest( + [ + { + key: 'key1aa', + val: '0123456789012345678901234567890123456789xx', + }, + { + key: 'key2bb', + val: 'aval2', + }, + { + key: 'key3cc', + val: 'aval3', + }, + ], + 'key1aa', + 'not the correct value' + ) + + expect( + await trie.verifyExclusionProof( + test.key, + test.val, + test.proof, + test.root + ) + ).to.equal(true) + }) + + it('should verify exclusion with a non-existent extension of a leaf', async () => { + const test = await makeProofTest( + [ + { + key: 'key1aa', + val: '0123456789012345678901234567890123456789xx', + }, + { + key: 'key2bb', + val: 'aval2', + }, + { + key: 'key3cc', + val: 'aval3', + }, + ], + 'key1aab', + 'some arbitrary value' + ) + + expect( + await trie.verifyExclusionProof( + test.key, + test.val, + test.proof, + test.root + ) + ).to.equal(true) + }) + + it('should verify exclusion with a non-existent extension of a branch', async () => { + const test = await makeProofTest( + [ + { + key: 'key1aa', + val: '0123456789012345678901234567890123456789xx', + }, + { + key: 
'key2bb', + val: 'aval2', + }, + { + key: 'key3cc', + val: 'aval3', + }, + ], + 'key4dd', + 'some arbitrary value' + ) + + expect( + await trie.verifyExclusionProof( + test.key, + test.val, + test.proof, + test.root + ) + ).to.equal(true) + }) + }) +}) diff --git a/packages/rollup-contracts/test/setup.ts b/packages/rollup-contracts/test/setup.ts index d92daff797721..cb7df3f702831 100644 --- a/packages/rollup-contracts/test/setup.ts +++ b/packages/rollup-contracts/test/setup.ts @@ -1,10 +1,13 @@ /* External Imports */ import chai = require('chai') +import chaiAsPromised = require('chai-as-promised') import bignum = require('chai-bignumber') import { solidity } from 'ethereum-waffle' chai.use(bignum()) +chai.use(chaiAsPromised) chai.use(solidity) const should = chai.should() +const expect = chai.expect -export { should } +export { should, expect } diff --git a/yarn.lock b/yarn.lock index a16a539159c7b..50cd79fed7f53 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2112,6 +2112,11 @@ bn.js@^5.1.1: resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.1.1.tgz#48efc4031a9c4041b9c99c6941d903463ab62eb5" integrity sha512-IUTD/REb78Z2eodka1QZyyEk66pciRcP6Sroka0aI3tG/iwIdYLrBD62RsubR7vqdt3WyX8p4jxeatzmRSphtA== +bn.js@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.1.2.tgz#c9686902d3c9a27729f43ab10f9d79c2004da7b0" + integrity sha512-40rZaf3bUNKTVYu9sIeeEGOg7g14Yvnj9kH7b50EiwX0Q7A6umbvfI5tvHaOERH0XigqKkfLkFQxzb4e6CIXnA== + body-parser@1.19.0, body-parser@^1.16.0, body-parser@^1.19.0: version "1.19.0" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" @@ -4048,6 +4053,19 @@ ethereumjs-util@^5.0.0, ethereumjs-util@^5.0.1, ethereumjs-util@^5.1.1, ethereum safe-buffer "^5.1.1" secp256k1 "^3.0.1" +ethereumjs-util@^7.0.0: + version "7.0.2" + resolved "https://registry.yarnpkg.com/ethereumjs-util/-/ethereumjs-util-7.0.2.tgz#7e0d9fcd225ece6e49ee4ea65609d124715f1d15" + integrity 
sha512-ATAP02eJLpAlWGfiKQddNrRfZpwXiTFhRN2EM/yLXMCdBW/xjKYblNKcx8GLzzrjXg0ymotck+lam1nuV90arQ== + dependencies: + "@types/bn.js" "^4.11.3" + bn.js "^5.1.2" + create-hash "^1.1.2" + ethjs-util "0.1.6" + keccak "^3.0.0" + rlp "^2.2.4" + secp256k1 "^4.0.1" + ethereumjs-util@~6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/ethereumjs-util/-/ethereumjs-util-6.0.0.tgz#f14841c182b918615afefd744207c7932c8536c0" @@ -6047,6 +6065,14 @@ keccak@^2.0.0: nan "^2.14.0" safe-buffer "^5.2.0" +keccak@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/keccak/-/keccak-3.0.0.tgz#420d1de4a38a04f33ff8401f0535fb93756861d4" + integrity sha512-/4h4FIfFEpTEuySXi/nVFM5rqSKPnnhI7cL4K3MFSwoI3VyM7AhPSq3SsysARtnEBEeIKMBUWD8cTh9nHE8AkA== + dependencies: + node-addon-api "^2.0.0" + node-gyp-build "^4.2.0" + keccakjs@^0.2.0, keccakjs@^0.2.1: version "0.2.3" resolved "https://registry.yarnpkg.com/keccakjs/-/keccakjs-0.2.3.tgz#5e4e969ce39689a3861f445d7752ee3477f9fe72" @@ -6215,7 +6241,15 @@ level-mem@^3.0.1: level-packager "~4.0.0" memdown "~3.0.0" -level-packager@^5.1.0: +level-mem@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/level-mem/-/level-mem-5.0.1.tgz#c345126b74f5b8aa376dc77d36813a177ef8251d" + integrity sha512-qd+qUJHXsGSFoHTziptAKXoLX87QjR7v2KMbqncDXPxQuCdsQlzmyX+gwrEHhlzn08vkf8TyipYyMmiC6Gobzg== + dependencies: + level-packager "^5.0.3" + memdown "^5.0.0" + +level-packager@^5.0.3, level-packager@^5.1.0: version "5.1.1" resolved "https://registry.yarnpkg.com/level-packager/-/level-packager-5.1.1.tgz#323ec842d6babe7336f70299c14df2e329c18939" integrity sha512-HMwMaQPlTC1IlcwT3+swhqf/NUO+ZhXVz6TY1zZIIZlIR0YSn8GtAAWmIvKjNY16ZkEg/JcpAuQskxsXqC0yOQ== @@ -6269,6 +6303,15 @@ level-ws@0.0.0: readable-stream "~1.0.15" xtend "~2.1.1" +level-ws@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/level-ws/-/level-ws-2.0.0.tgz#207a07bcd0164a0ec5d62c304b4615c54436d339" + integrity 
sha512-1iv7VXx0G9ec1isqQZ7y5LmoZo/ewAsyDHNA8EFDW5hqH2Kqovm33nSFkSdnLLAK+I5FlT+lo5Cw9itGe+CpQA== + dependencies: + inherits "^2.0.3" + readable-stream "^3.1.0" + xtend "^4.0.1" + level@^6.0.0, level@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/level/-/level-6.0.1.tgz#dc34c5edb81846a6de5079eac15706334b0d7cd6" @@ -6747,6 +6790,17 @@ merkle-patricia-tree@2.3.2, merkle-patricia-tree@^2.1.2, merkle-patricia-tree@^2 rlp "^2.0.0" semaphore ">=1.0.1" +"merkle-patricia-tree@git+https://github.com/kfichter/merkle-patricia-tree": + version "3.0.0" + resolved "git+https://github.com/kfichter/merkle-patricia-tree#ebd10c405be8ae909f1f82dea275a0e9ec1c8e46" + dependencies: + ethereumjs-util "^7.0.0" + level-mem "^5.0.1" + level-ws "^2.0.0" + readable-stream "^3.6.0" + rlp "^2.2.4" + semaphore-async-await "^1.5.1" + merkletreejs@^0.1.7: version "0.1.11" resolved "https://registry.yarnpkg.com/merkletreejs/-/merkletreejs-0.1.11.tgz#52f52522fa90ff3002633084fb6a87d539c5b724" @@ -7173,6 +7227,11 @@ nice-try@^1.0.4: resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== +node-addon-api@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-2.0.1.tgz#4fd0931bf6d7e48b219ff3e6abc73cbb0252b7a3" + integrity sha512-2WVfwRfIr1AVn3dRq4yRc2Hn35ND+mPJH6inC6bjpYCZVrpXPB4j3T6i//OGVfqVsR1t/X/axRulDsheq4F0LQ== + node-environment-flags@1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/node-environment-flags/-/node-environment-flags-1.0.5.tgz#fa930275f5bf5dae188d6192b24b4c8bbac3d76a" @@ -7216,6 +7275,11 @@ node-fetch@~1.7.1: encoding "^0.1.11" is-stream "^1.0.1" +node-gyp-build@^4.2.0: + version "4.2.2" + resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.2.2.tgz#3f44b65adaafd42fb6c3d81afd630e45c847eb66" + integrity 
sha512-Lqh7mrByWCM8Cf9UPqpeoVBBo5Ugx+RKu885GAzmLBVYjeywScxHXPGLa4JfYNZmcNGwzR0Glu5/9GaQZMFqyA== + node-gyp-build@~4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.1.1.tgz#d7270b5d86717068d114cc57fff352f96d745feb" @@ -8089,6 +8153,11 @@ quick-lru@^1.0.0: resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-1.1.0.tgz#4360b17c61136ad38078397ff11416e186dcfbb8" integrity sha1-Q2CxfGETatOAeDl/8RQW4Ybc+7g= +random-bytes-seed@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/random-bytes-seed/-/random-bytes-seed-1.0.3.tgz#126f1201dba2ec70cd0784f94a810396cad24a15" + integrity sha512-O+eniMt8Sj2iAn2q1x5VEirS/XvbtwYcXNDbOAcRtGN+OhC48cmzS5ksf9qEHRVKC1I8A4qzjucNVElddofB0A== + randomatic@^3.0.0: version "3.1.1" resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-3.1.1.tgz#b776efc59375984e36c537b2f51a1f0aff0da1ed" @@ -8215,7 +8284,7 @@ read@1, read@~1.0.1: string_decoder "~1.1.1" util-deprecate "~1.0.1" -"readable-stream@2 || 3", readable-stream@^3.0.2, readable-stream@^3.4.0: +"readable-stream@2 || 3", readable-stream@^3.0.2, readable-stream@^3.1.0, readable-stream@^3.4.0, readable-stream@^3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== @@ -8500,6 +8569,13 @@ rlp@^2.0.0, rlp@^2.2.1, rlp@^2.2.2, rlp@^2.2.3: dependencies: bn.js "^4.11.1" +rlp@^2.2.4, rlp@^2.2.5: + version "2.2.5" + resolved "https://registry.yarnpkg.com/rlp/-/rlp-2.2.5.tgz#b0577b763e909f21a9dea31b4b235b2393f15ef1" + integrity sha512-y1QxTQOp0OZnjn19FxBmped4p+BSKPHwGndaqrESseyd2xXZtcgR3yuTIosh8CaMaOii9SKIYerBXnV/CpJ3qw== + dependencies: + bn.js "^4.11.1" + run-async@^2.2.0: version "2.4.0" resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.4.0.tgz#e59054a5b86876cfae07f431d18cbaddc594f1e8" @@ -8615,6 +8691,15 @@ 
secp256k1@^3.0.1: nan "^2.14.0" safe-buffer "^5.1.2" +secp256k1@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/secp256k1/-/secp256k1-4.0.1.tgz#b9570ca26ace9e74c3171512bba253da9c0b6d60" + integrity sha512-iGRjbGAKfXMqhtdkkuNxsgJQfJO8Oo78Rm7DAvsG3XKngq+nJIOGqrCSXcQqIVsmCj0wFanE5uTKFxV3T9j2wg== + dependencies: + elliptic "^6.5.2" + node-addon-api "^2.0.0" + node-gyp-build "^4.2.0" + seed-random@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/seed-random/-/seed-random-2.2.0.tgz#2a9b19e250a817099231a5b99a4daf80b7fbed54" @@ -8625,6 +8710,11 @@ seedrandom@3.0.1: resolved "https://registry.yarnpkg.com/seedrandom/-/seedrandom-3.0.1.tgz#eb3dde015bcf55df05a233514e5df44ef9dce083" integrity sha512-1/02Y/rUeU1CJBAGLebiC5Lbo5FnB22gQbIFFYTLkwvp1xdABZJH1sn4ZT1MzXmPpzv+Rf/Lu2NcsLJiK4rcDg== +seedrandom@^3.0.5: + version "3.0.5" + resolved "https://registry.yarnpkg.com/seedrandom/-/seedrandom-3.0.5.tgz#54edc85c95222525b0c7a6f6b3543d8e0b3aa0a7" + integrity sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg== + seek-bzip@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/seek-bzip/-/seek-bzip-1.0.5.tgz#cfe917cb3d274bcffac792758af53173eb1fabdc" @@ -8632,6 +8722,11 @@ seek-bzip@^1.0.5: dependencies: commander "~2.8.1" +semaphore-async-await@^1.5.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/semaphore-async-await/-/semaphore-async-await-1.5.1.tgz#857bef5e3644601ca4b9570b87e9df5ca12974fa" + integrity sha1-hXvvXjZEYBykuVcLh+nfXKEpdPo= + semaphore@>=1.0.1, semaphore@^1.0.3, semaphore@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/semaphore/-/semaphore-1.1.0.tgz#aaad8b86b20fe8e9b32b16dc2ee682a8cd26a8aa"