diff --git a/hathor/conf/unittests.py b/hathor/conf/unittests.py index 39e0b67e0..afd06e266 100644 --- a/hathor/conf/unittests.py +++ b/hathor/conf/unittests.py @@ -25,9 +25,9 @@ MIN_SHARE_WEIGHT=2, MAX_TX_WEIGHT_DIFF=25.0, BLOCK_DIFFICULTY_N_BLOCKS=20, - GENESIS_OUTPUT_SCRIPT=bytes.fromhex('76a914fd05059b6006249543b82f36876a17c73fd2267b88ac'), - GENESIS_BLOCK_NONCE=0, - GENESIS_BLOCK_HASH=bytes.fromhex('339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792'), + GENESIS_OUTPUT_SCRIPT=bytes.fromhex('76a914d07bc82d6e0d1bb116614076645e9b87c8c83b4188ac'), + GENESIS_BLOCK_NONCE=5, + GENESIS_BLOCK_HASH=bytes.fromhex('2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488'), GENESIS_TX1_NONCE=6, GENESIS_TX1_HASH=bytes.fromhex('16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952'), GENESIS_TX2_NONCE=2, diff --git a/hathor/conf/unittests.yml b/hathor/conf/unittests.yml index abab9ae90..fdcc5e261 100644 --- a/hathor/conf/unittests.yml +++ b/hathor/conf/unittests.yml @@ -7,9 +7,9 @@ MIN_TX_WEIGHT: 2 MIN_SHARE_WEIGHT: 2 MAX_TX_WEIGHT_DIFF: 25.0 BLOCK_DIFFICULTY_N_BLOCKS: 20 -GENESIS_OUTPUT_SCRIPT: 76a914fd05059b6006249543b82f36876a17c73fd2267b88ac -GENESIS_BLOCK_NONCE: 0 -GENESIS_BLOCK_HASH: 339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792 +GENESIS_OUTPUT_SCRIPT: 76a914d07bc82d6e0d1bb116614076645e9b87c8c83b4188ac +GENESIS_BLOCK_NONCE: 5 +GENESIS_BLOCK_HASH: 2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488 GENESIS_TX1_NONCE: 6 GENESIS_TX1_HASH: 16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952 GENESIS_TX2_NONCE: 2 diff --git a/hathor/dag_builder/__init__.py b/hathor/dag_builder/__init__.py new file mode 100644 index 000000000..3bcdb794e --- /dev/null +++ b/hathor/dag_builder/__init__.py @@ -0,0 +1,17 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.dag_builder.builder import DAGBuilder + +__all__ = ['DAGBuilder'] diff --git a/hathor/dag_builder/artifacts.py b/hathor/dag_builder/artifacts.py new file mode 100644 index 000000000..8137951ca --- /dev/null +++ b/hathor/dag_builder/artifacts.py @@ -0,0 +1,40 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from typing import TYPE_CHECKING, Iterator, NamedTuple + +from hathor.dag_builder.types import DAGNode + +if TYPE_CHECKING: + from hathor.transaction import BaseTransaction + + +class _Pair(NamedTuple): + node: DAGNode + vertex: BaseTransaction + + +class DAGArtifacts: + def __init__(self, items: Iterator[tuple[DAGNode, BaseTransaction]]) -> None: + self.by_name: dict[str, _Pair] = {} + + v: list[_Pair] = [] + for node, vertex in items: + p = _Pair(node, vertex) + v.append(p) + self.by_name[node.name] = p + + self.list: tuple[_Pair, ...] 
= tuple(v) diff --git a/hathor/dag_builder/builder.py b/hathor/dag_builder/builder.py new file mode 100644 index 000000000..e28a6fdfd --- /dev/null +++ b/hathor/dag_builder/builder.py @@ -0,0 +1,210 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from collections import defaultdict +from typing import Iterator + +from structlog import get_logger +from typing_extensions import Self + +from hathor.conf.settings import HathorSettings +from hathor.daa import DifficultyAdjustmentAlgorithm +from hathor.dag_builder.artifacts import DAGArtifacts +from hathor.dag_builder.tokenizer import Token, TokenType +from hathor.dag_builder.types import ( + AttributeType, + DAGInput, + DAGNode, + DAGNodeType, + DAGOutput, + VertexResolverType, + WalletFactoryType, +) +from hathor.wallet import BaseWallet + +logger = get_logger() + + +class DAGBuilder: + def __init__( + self, + settings: HathorSettings, + daa: DifficultyAdjustmentAlgorithm, + genesis_wallet: BaseWallet, + wallet_factory: WalletFactoryType, + vertex_resolver: VertexResolverType, + ) -> None: + from hathor.dag_builder.default_filler import DefaultFiller + from hathor.dag_builder.tokenizer import tokenize + from hathor.dag_builder.vertex_exporter import VertexExporter + + self.log = logger.new() + + self._nodes: dict[str, DAGNode] = {} + self._tokenize = tokenize + self._filler = DefaultFiller(self, settings, daa) + self._exporter = VertexExporter( + 
builder=self, + settings=settings, + daa=daa, + genesis_wallet=genesis_wallet, + wallet_factory=wallet_factory, + vertex_resolver=vertex_resolver, + ) + + def parse_tokens(self, tokens: Iterator[Token]) -> None: + """Parse tokens and update the DAG accordingly.""" + for parts in tokens: + match parts: + case (TokenType.PARENT, (_from, _to)): + self.add_parent_edge(_from, _to) + + case (TokenType.SPEND, (_from, _to, _txout_index)): + self.add_spending_edge(_from, _to, _txout_index) + + case (TokenType.ATTRIBUTE, (name, key, value)): + self.add_attribute(name, key, value) + + case (TokenType.ORDER_BEFORE, (_from, _to)): + self.add_deps(_from, _to) + + case (TokenType.OUTPUT, (name, index, amount, token, attrs)): + self.set_output(name, index, amount, token, attrs) + + case (TokenType.BLOCKCHAIN, (name, first_parent, begin_index, end_index)): + self.add_blockchain(name, first_parent, begin_index, end_index) + + case _: + raise NotImplementedError(parts) + + def _get_node(self, name: str) -> DAGNode: + """Return a node.""" + return self._nodes[name] + + def _get_or_create_node(self, name: str, *, default_type: DAGNodeType = DAGNodeType.Unknown) -> DAGNode: + """Return a node, creating one if needed.""" + if name not in self._nodes: + node = DAGNode(name=name, type=default_type) + self._nodes[name] = node + else: + node = self._nodes[name] + if node.type is DAGNodeType.Unknown: + node.type = default_type + else: + if default_type != DAGNodeType.Unknown: + assert node.type is default_type, f'{node.type} != {default_type}' + return node + + def add_deps(self, _from: str, _to: str) -> Self: + """Add a dependency between two nodes. 
For clarity, `_to` has to be created before `_from`.""" + from_node = self._get_or_create_node(_from) + self._get_or_create_node(_to) + from_node.deps.add(_to) + return self + + def add_blockchain(self, prefix: str, first_parent: str | None, first_index: int, last_index: int) -> Self: + """Add a sequence of nodes representing a chain of blocks.""" + prev = first_parent + for i in range(first_index, last_index + 1): + name = f'{prefix}{i}' + self._get_or_create_node(name, default_type=DAGNodeType.Block) + if prev is not None: + self.add_parent_edge(name, prev) + prev = name + return self + + def add_parent_edge(self, _from: str, _to: str) -> Self: + """Add a parent edge between two nodes. For clarity, `_to` has to be created befre `_from`.""" + self._get_or_create_node(_to) + from_node = self._get_or_create_node(_from) + from_node.parents.add(_to) + return self + + def add_spending_edge(self, _from: str, _to: str, _txout_index: int) -> Self: + """Add a spending edge between two nodes. For clarity, `_to` has to be created before `_from`.""" + to_node = self._get_or_create_node(_to) + if len(to_node.outputs) <= _txout_index: + to_node.outputs.extend([None] * (_txout_index - len(to_node.outputs) + 1)) + to_node.outputs[_txout_index] = DAGOutput(0, '', {}) + from_node = self._get_or_create_node(_from) + from_node.inputs.add(DAGInput(_to, _txout_index)) + return self + + def set_output(self, name: str, index: int, amount: int, token: str, attrs: AttributeType) -> Self: + """Set information about an output.""" + node = self._get_or_create_node(name) + if len(node.outputs) <= index: + node.outputs.extend([None] * (index - len(node.outputs) + 1)) + node.outputs[index] = DAGOutput(amount, token, attrs) + if token != 'HTR': + self._get_or_create_node(token, default_type=DAGNodeType.Token) + node.deps.add(token) + return self + + def add_attribute(self, name: str, key: str, value: str) -> Self: + """Add an attribute to a node.""" + node = self._get_or_create_node(name) + if 
key == 'type': + node.type = DAGNodeType(value) + else: + node.attrs[key] = value + return self + + def topological_sorting(self) -> Iterator[DAGNode]: + """Run a topological sort on the DAG, yielding nodes in an order that respects all dependency constraints.""" + direct_deps: dict[str, set[str]] = {} + rev_deps: dict[str, set[str]] = defaultdict(set) + seen: set[str] = set() + candidates: list[str] = [] + for name, node in self._nodes.items(): + assert name == node.name + deps = set(node.get_all_dependencies()) + assert name not in direct_deps + direct_deps[name] = deps + for x in deps: + rev_deps[x].add(name) + if len(deps) == 0: + candidates.append(name) + + for _ in range(len(self._nodes)): + if len(candidates) == 0: + self.log('fail because there is at least one cycle in the dependencies', + direct_deps=direct_deps, + rev_deps=rev_deps, + seen=seen, + not_seen=set(self._nodes.keys()) - seen, + nodes=self._nodes) + raise RuntimeError('there is at least one cycle') + name = candidates.pop() + assert name not in seen + seen.add(name) + for d in rev_deps[name]: + direct_deps[d].remove(name) + if len(direct_deps[d]) == 0: + candidates.append(d) + del direct_deps[d] + node = self._get_node(name) + yield node + + def build(self) -> DAGArtifacts: + """Build all the transactions based on the DAG.""" + self._filler.run() + return DAGArtifacts(self._exporter.export()) + + def build_from_str(self, content: str) -> DAGArtifacts: + """Run build() after creating an initial DAG from a string.""" + self.parse_tokens(self._tokenize(content)) + return self.build() diff --git a/hathor/dag_builder/cli.py b/hathor/dag_builder/cli.py new file mode 100644 index 000000000..ff6184fb4 --- /dev/null +++ b/hathor/dag_builder/cli.py @@ -0,0 +1,66 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.dag_builder.builder import DAGBuilder + + +def main(filename: str, genesis_seed: str) -> None: + from hathor.reactor import initialize_global_reactor + + # reactor + _ = initialize_global_reactor(use_asyncio_reactor=False) + + from hathor.conf.get_settings import get_global_settings + from hathor.daa import DifficultyAdjustmentAlgorithm + from hathor.wallet import HDWallet + settings = get_global_settings() + + def wallet_factory(words=None): + if words is None: + words = ('bind daring above film health blush during tiny neck slight clown salmon ' + 'wine brown good setup later omit jaguar tourist rescue flip pet salute') + hd = HDWallet(words=words) + hd._manually_initialize() + return hd + + genesis_wallet = wallet_factory(genesis_seed) + daa = DifficultyAdjustmentAlgorithm(settings=settings) + + builder = DAGBuilder( + settings=settings, + daa=daa, + genesis_wallet=genesis_wallet, + wallet_factory=wallet_factory, + vertex_resolver=lambda x: None, + ) + + fp = open(filename, 'r') + content = fp.read() + artifacts = builder.build_from_str(content) + + for node, vertex in artifacts.list: + print('//', node) + print('//', repr(vertex)) + print('//', node.name) + print(bytes(vertex).hex()) + print() + + +if __name__ == '__main__': + import os + import sys + if 'HATHOR_CONFIG_YAML' not in os.environ: + os.environ['HATHOR_CONFIG_YAML'] = './hathor/conf/testnet.yml' + genesis_seed = os.environ['GENESIS_SEED'] + main(sys.argv[1], genesis_seed) diff --git a/hathor/dag_builder/default_filler.py b/hathor/dag_builder/default_filler.py new file mode 
100644 index 000000000..95026e2cc --- /dev/null +++ b/hathor/dag_builder/default_filler.py @@ -0,0 +1,270 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from collections import defaultdict +from math import ceil + +from hathor.conf.settings import HathorSettings +from hathor.daa import DifficultyAdjustmentAlgorithm +from hathor.dag_builder.builder import DAGBuilder, DAGInput, DAGNode, DAGNodeType, DAGOutput + + +class DefaultFiller: + """This filler applies a strategy to complete a DAG. + + The strategy is to create a dummy transaction that spends from the genesis block + and has one output for each transaction that needs HTR tokens. + + For custom tokens, it creates an output on the TokenCreationTransaction of the token + for each transaction that needs that custom token. 
+ """ + + def __init__(self, builder: DAGBuilder, settings: HathorSettings, daa: DifficultyAdjustmentAlgorithm) -> None: + self._builder = builder + self._settings = settings + self._daa = daa + + # create the dummy and genesis nodes before builder.build() is called + genesis_block = self._get_or_create_node('genesis_block', default_type=DAGNodeType.Genesis) + if len(genesis_block.outputs) == 0: + genesis_block.outputs.append(DAGOutput(self._settings.GENESIS_TOKENS, 'HTR', {})) + self._get_or_create_node('genesis_1', default_type=DAGNodeType.Genesis) + self._get_or_create_node('genesis_2', default_type=DAGNodeType.Genesis) + self._get_or_create_node('dummy', default_type=DAGNodeType.Transaction) + + def _get_node(self, name: str) -> DAGNode: + """Get a node.""" + return self._builder._get_node(name) + + def _get_or_create_node(self, name: str, *, default_type: DAGNodeType = DAGNodeType.Unknown) -> DAGNode: + """Get a node.""" + return self._builder._get_or_create_node(name, default_type=default_type) + + @staticmethod + def get_next_index(outputs: list[DAGOutput | None]) -> int: + """Return the next index to place a new output. + + If all slots are full, it creates a new slot at the end.""" + for i, txout in enumerate(outputs): + if txout is None: + return i + outputs.append(None) + return len(outputs) - 1 + + def fill_parents(self, node: DAGNode, *, target: int = 2, candidates: list[str] | None = []) -> None: + """Fill parents of a vertex. + + Note: We shouldn't use the DAG transactions because it would confirm them, violating the DAG description.""" + # What's the best way to fill the parents? + # Should we use dummy transactions so it is unrelated to the other transactions? 
+ if node.type is DAGNodeType.Genesis: + return + if len(node.parents) >= target: + return + + if not candidates: + candidates = [ + 'genesis_1', + 'genesis_2', + ] + for pi in candidates: + if len(node.parents) >= target: + break + node.parents.add(pi) + + def find_txin(self, amount: int, token: str) -> DAGInput: + """Create a DAGInput for an amount of tokens.""" + if token == 'HTR': + dummy = self._get_node('dummy') + dummy.inputs.add(DAGInput('genesis_block', 0)) + self.fill_parents(dummy) + + # TODO no more than 255 inputs + index = self.get_next_index(dummy.outputs) + dummy.outputs[index] = DAGOutput(amount, token, {'_origin': 'f1'}) + return DAGInput('dummy', index) + + else: + token_node = self._get_or_create_node(token) + index = self.get_next_index(token_node.outputs) + token_node.outputs[index] = DAGOutput(amount, token, {'_origin': 'f2'}) + return DAGInput(token, index) + + def calculate_balance(self, node: DAGNode) -> dict[str, int]: + """Calculate the balance for each token in a node.""" + ins: defaultdict[str, int] = defaultdict(int) + for tx_name, index in node.inputs: + node2 = self._get_or_create_node(tx_name) + txout = node2.outputs[index] + assert txout is not None + ins[txout.token] += txout.amount + + outs: defaultdict[str, int] = defaultdict(int) + for txout in node.outputs: + assert txout is not None + outs[txout.token] += txout.amount + + keys = set(ins.keys()) | set(outs.keys()) + balance = {} + for key in keys: + balance[key] = outs.get(key, 0) - ins.get(key, 0) + + return balance + + def balance_node_inputs_and_outputs(self, node: DAGNode) -> None: + """Balance the inputs and outputs of a node.""" + balance = self.calculate_balance(node) + + for key, diff in balance.items(): + # =0 balance + # <0 need output + # >0 need input + if diff < 0: + index = self.get_next_index(node.outputs) + node.outputs[index] = DAGOutput(abs(diff), key, {'_origin': 'f3'}) + elif diff > 0: + txin = self.find_txin(diff, key) + node.inputs.add(txin) + + def 
run(self) -> None: + """Run the filler.""" + for node in self._builder._nodes.values(): + if node.type is DAGNodeType.Unknown: + node.type = DAGNodeType.Transaction + + for node in self._builder._nodes.values(): + if node.type is DAGNodeType.Genesis: + continue + if node.name == 'dummy': + continue + if not node.inputs and not node.outputs: + if node.type is DAGNodeType.Block: + continue + node.outputs.append(DAGOutput(1, 'HTR', {'_origin': 'f4'})) + for i in range(len(node.outputs)): + txout = node.outputs[i] + if txout is None: + node.outputs[i] = DAGOutput(1, 'HTR', {'_origin': 'f5'}) + elif txout.amount == 0: + assert not txout.token + assert not txout.attrs + node.outputs[i] = DAGOutput(1, 'HTR', {'_origin': 'f6'}) + + tokens = [] + for node in list(self._builder.topological_sorting()): + match node.type: + case DAGNodeType.Genesis: + # do nothing + pass + + case DAGNodeType.Block: + if len(node.inputs) > 0: + raise ValueError + + if len(node.outputs) > 1: + raise ValueError + + blk_count = 0 + txs_count = 0 + parent_blk: DAGNode | None = None + for pi in node.parents: + pi_node = self._get_or_create_node(pi) + if pi_node.type is DAGNodeType.Block: + blk_count += 1 + assert parent_blk is None + parent_blk = pi_node + else: + txs_count += 1 + + candidates: list[str] = [] + if blk_count == 0: + node.parents.add('genesis_block') + else: + assert parent_blk is not None + candidates = [ + x + for x in parent_blk.parents + if x != 'genesis_block' and self._get_node(x).type is not DAGNodeType.Block + ] + + self.fill_parents(node, target=3, candidates=candidates) + assert len(node.parents) == 3 + + balance = self.calculate_balance(node) + assert set(balance.keys()).issubset({'HTR'}) + diff = balance.get('HTR', 0) + + target = self._daa.get_tokens_issued_per_block(1) # TODO Use the actual height. 
+ assert diff >= 0 + assert diff <= target + + if diff < target: + node.outputs.append(DAGOutput(target - diff, 'HTR', {'_origin': 'f7'})) + + case DAGNodeType.Transaction: + if node.name == 'dummy': + continue + + self.fill_parents(node) + self.balance_node_inputs_and_outputs(node) + + case DAGNodeType.Token: + tokens.append(node.name) + self.fill_parents(node) + + case _: + raise NotImplementedError(node.type) + + for token in tokens: + node = self._get_or_create_node(token) + + balance = self.calculate_balance(node) + assert set(balance.keys()).issubset({'HTR', token}) + + htr_minimum = ceil(balance[token] / 100) + htr_balance = -balance.get('HTR', 0) + + if htr_balance > htr_minimum: + index = self.get_next_index(node.outputs) + node.outputs[index] = DAGOutput(htr_balance - htr_minimum, 'HTR', {'_origin': 'f8'}) + + elif htr_balance < htr_minimum: + txin = self.find_txin(htr_minimum - htr_balance, 'HTR') + node.inputs.add(txin) + + if 'dummy' in self._builder._nodes: + node = self._get_node('dummy') + balance = self.calculate_balance(node) + if not balance: + del self._builder._nodes['dummy'] + else: + assert set(balance.keys()) == {'HTR'} + diff = balance.get('HTR', 0) + + assert diff <= 0 + + if diff < 0: + index = self.get_next_index(node.outputs) + node.outputs[index] = DAGOutput(-diff, 'HTR', {}) + + for node in self._builder._nodes.values(): + if node.type is DAGNodeType.Block: + continue + if node.type is DAGNodeType.Genesis: + continue + if node.name == 'dummy': + continue + self._builder.add_deps(node.name, 'dummy') diff --git a/hathor/dag_builder/tokenizer.py b/hathor/dag_builder/tokenizer.py new file mode 100644 index 000000000..041eac32b --- /dev/null +++ b/hathor/dag_builder/tokenizer.py @@ -0,0 +1,172 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# Copyright 2024 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
A domain specific language to describe DAGs.

Syntax:

    blockchain genesis a[2..5]  # create blocks a2, a3, a4, and a5 where a2's parent is the genesis block
    blockchain pi a[5..7]       # create blocks a5, a6, and a7 where a5's parent is pi
    a <-- b <-- c               # a is a parent of b which is a parent of c
    a --> b --> c               # c is a parent of b which is a parent of a
    a.out[i] <<< b c d          # b, c, and d spend the i-th output of a
    a < b < c                   # a must be created before b and b must be created before c
    a > b > c                   # a must be created after b and b must be created after c
    a.attr = value              # set value of attribute attr to a

Special attributes:
    a.out[i] = 100 HTR          # set that the i-th output of a holds 100 HTR
    a.out[i] = 100 TOKEN        # set that the i-th output of a holds 100 TOKEN where TOKEN is a custom token
    a.weight = 50               # set vertex weight

Example:

    blockchain genesis a[0..300]
    blockchain a300 b[0..20]
    blockchain b4 c[0..10]

    # reward lock
    a300 < dummy

    b11 --> tx1
    b11 --> tx2

    b14 --> tx1
    b14 --> tx3

    c3 --> tx1
    c3 --> tx2

    tx1 <-- tx2 <-- tx3

    tx3 --> tx5 --> tx6

    tx1.out[0] <<< tx2 tx3
    tx1.out[0] <<< tx4

    a0.out[0] <<< tx1

    tx1.out[0] = 100 HTR [wallet1]
    tx1.out[1] = 50 TK1 [wallet2]
    tx2.out[0] = 75 USDC [wallet1]

    USDC.out[0] = 100000 HTR

    b5 < c0 < c10 < b20
    b6 < tx3
    b16 < tx4
"""

import re
from enum import Enum, auto
from typing import Any, Iterator


class TokenType(Enum):
    BLOCKCHAIN = auto()
    ATTRIBUTE = auto()
    PARENT = auto()
    SPEND = auto()
    OUTPUT = auto()
    ORDER_BEFORE = auto()


# A token is its type plus a type-specific payload tuple.
Token = tuple[TokenType, tuple[Any, ...]]


def collect_pairs(parts: list[str], expected_sep: str) -> Iterator[tuple[str, str]]:
    """Pair all parts two by two, checking that every separator matches.

    ``['a', '<', 'b', '<', 'c']`` yields ``('a', 'b')`` then ``('b', 'c')``.
    Raises SyntaxError on a malformed or inconsistently-separated chain."""
    n = len(parts)
    if n < 3 or n % 2 == 0:
        raise SyntaxError

    for i in range(0, n - 2, 2):
        sep = parts[i + 1]
        if sep != expected_sep:
            raise SyntaxError(f'inconsistent separator; got {sep} but expecting {expected_sep}')
        yield parts[i], parts[i + 2]


def tokenize(content: str) -> Iterator[Token]:
    """Parse content and generate tokens.

    Fix 1: the range dots in the blockchain regex were unescaped (`..` matched
    any two characters, accepting e.g. ``a[0xx5]``); they are now ``\\.\\.``.
    Fix 2: a line with a single word raised IndexError instead of SyntaxError.
    """
    blockchain_re = re.compile(r'^([a-zA-Z][a-zA-Z0-9-_]*)\[([0-9]+)\.\.([0-9]+)\]$')
    first_parent: str | None
    for line in content.split('\n'):
        # Strip comments and surrounding whitespace; skip blank lines.
        line, _, _ = line.partition('#')
        line = line.strip()
        if not line:
            continue

        # split() trims on both sides and removes empty parts.
        parts = line.split()

        if parts[0] == 'blockchain':
            if len(parts) != 3:
                raise SyntaxError
            first_parent = parts[1]
            if first_parent == 'genesis':
                first_parent = None
            match = blockchain_re.match(parts[2])
            if not match:
                raise SyntaxError(f'invalid blockchain format: {line}')
            name, begin, end = match.groups()
            yield (TokenType.BLOCKCHAIN, (name, first_parent, int(begin), int(end)))

        elif len(parts) < 2:
            # Every remaining form needs at least an operator after the first word.
            raise SyntaxError(line)

        elif parts[1] == '=':
            name, key = parts[0].split('.', 1)
            if key.startswith('out[') and key[-1] == ']':
                index = int(key[4:-1])
                amount = int(parts[2])
                token = parts[3]
                attrs = parts[4:]
                yield (TokenType.OUTPUT, (name, index, amount, token, attrs))
            else:
                yield (TokenType.ATTRIBUTE, (name, key, ' '.join(parts[2:])))

        elif parts[1] == '<--':
            for _to, _from in collect_pairs(parts, '<--'):
                yield (TokenType.PARENT, (_from, _to))

        elif parts[1] == '-->':
            for _from, _to in collect_pairs(parts, '-->'):
                yield (TokenType.PARENT, (_from, _to))

        elif parts[1] == '<<<':
            _to, _out = parts[0].split('.', 1)
            if not _out.startswith('out['):
                raise SyntaxError
            if _out[-1] != ']':
                raise SyntaxError
            _txout_index = int(_out[4:-1])
            for _from in parts[2:]:
                yield (TokenType.SPEND, (_from, _to, _txout_index))

        elif parts[1] == '<':
            for _a, _b in collect_pairs(parts, '<'):
                yield (TokenType.ORDER_BEFORE, (_b, _a))

        elif parts[1] == '>':
            for _a, _b in collect_pairs(parts, '>'):
                yield (TokenType.ORDER_BEFORE, (_a, _b))

        else:
            raise SyntaxError(line)
+ +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass, field +from enum import Enum +from typing import Any, Iterator, NamedTuple, TypeAlias + +from hathor.transaction import BaseTransaction +from hathor.wallet import BaseWallet + +AttributeType: TypeAlias = dict[str, str | int] +VertexResolverType: TypeAlias = Callable[[BaseTransaction], Any] +WalletFactoryType: TypeAlias = Callable[[], BaseWallet] + + +class DAGNodeType(Enum): + Unknown = 'unknown' + Block = 'block' + Transaction = 'transaction' + Token = 'token' + Genesis = 'genesis' + + +@dataclass +class DAGNode: + name: str + type: DAGNodeType + + attrs: dict[str, str] = field(default_factory=dict) + inputs: set[DAGInput] = field(default_factory=set) + outputs: list[DAGOutput | None] = field(default_factory=list) + parents: set[str] = field(default_factory=set) + deps: set[str] = field(default_factory=set) + + def get_all_dependencies(self) -> Iterator[str]: + yield from self.parents + yield from (name for name, _ in self.inputs) + yield from self.deps + + +class DAGInput(NamedTuple): + node_name: str + txout_index: int + + +class DAGOutput(NamedTuple): + amount: int + token: str + attrs: AttributeType diff --git a/hathor/dag_builder/vertex_exporter.py b/hathor/dag_builder/vertex_exporter.py new file mode 100644 index 000000000..4daa17f39 --- /dev/null +++ b/hathor/dag_builder/vertex_exporter.py @@ -0,0 +1,293 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Iterator + +from hathor.conf.settings import HathorSettings +from hathor.crypto.util import decode_address +from hathor.daa import DifficultyAdjustmentAlgorithm +from hathor.dag_builder.builder import DAGBuilder, DAGNode +from hathor.dag_builder.types import DAGNodeType, VertexResolverType, WalletFactoryType +from hathor.transaction import BaseTransaction, Block, Transaction +from hathor.transaction.base_transaction import TxInput, TxOutput +from hathor.transaction.scripts.p2pkh import P2PKH +from hathor.transaction.token_creation_tx import TokenCreationTransaction +from hathor.wallet import BaseWallet + + +class VertexExporter: + """Transform a complete DAG into vertices. + """ + def __init__( + self, + *, + builder: DAGBuilder, + settings: HathorSettings, + daa: DifficultyAdjustmentAlgorithm, + genesis_wallet: BaseWallet, + wallet_factory: WalletFactoryType, + vertex_resolver: VertexResolverType, + ) -> None: + self._builder = builder + self._vertices: dict[str, BaseTransaction] = {} + self._wallets: dict[str, BaseWallet] = {} + self._vertice_per_id: dict[bytes, BaseTransaction] = {} + self._block_height: dict[bytes, int] = {} + + self._settings = settings + self._daa = daa + self._wallet_factory = wallet_factory + self._vertex_resolver = vertex_resolver + + self._wallets['genesis'] = genesis_wallet + self._wallets['main'] = self._wallet_factory() + + def _get_node(self, name: str) -> DAGNode: + """Get node.""" + return self._builder._get_node(name) + + def get_vertex_id(self, name: str) -> bytes: + """Get the vertex id given its node name.""" + return self._vertices[name].hash + + def get_parent_block(self, block: Block) -> Block: + """Get the parent block of a block.""" + if block.parents[0] == self._settings.GENESIS_BLOCK_HASH: + genesis_block = Block( + timestamp=self._settings.GENESIS_BLOCK_TIMESTAMP, + 
weight=self._settings.MIN_BLOCK_WEIGHT, + ) + genesis_block.get_height = lambda: 0 # type: ignore[method-assign] + return genesis_block + parent = self._vertice_per_id[block.parents[0]] + assert isinstance(parent, Block) + return parent + + def _create_vertex_parents(self, node: DAGNode) -> tuple[list[bytes], list[bytes]]: + """Convert node parents to vertex parents, splitted into blocks and transactions.""" + block_parents = [] + txs_parents = [] + for pi in node.parents: + pi_node = self._get_node(pi) + if pi_node.type is DAGNodeType.Block or pi_node.name == 'genesis_block': + block_parents.append(self.get_vertex_id(pi)) + else: + txs_parents.append(self.get_vertex_id(pi)) + return block_parents, txs_parents + + def _create_vertex_txin(self, node: DAGNode) -> list[TxInput]: + """Create TxInput objects for a node.""" + inputs = [] + for tx_name, index in node.inputs: + txin = TxInput(tx_id=self.get_vertex_id(tx_name), index=index, data=b'') + inputs.append(txin) + return inputs + + def _create_vertex_txout( + self, + node: DAGNode, + *, + token_creation: bool = False + ) -> tuple[list[bytes], list[TxOutput]]: + """Create TxOutput objects for a node.""" + tokens: list[bytes] = [] + outputs: list[TxOutput] = [] + + for txout in node.outputs: + assert txout is not None + amount, token_name, attrs = txout + if token_name == 'HTR': + index = 0 + elif token_creation: + index = 1 + else: + token_uid = self.get_vertex_id(token_name) + try: + index = tokens.index(token_uid) + 1 + except ValueError: + tokens.append(token_uid) + index = len(tokens) + + script = self.get_next_p2pkh_script() + outputs.append(TxOutput(value=amount, token_data=index, script=script)) + + return tokens, outputs + + def get_next_p2pkh_script(self) -> bytes: + """Return next p2pkh script to be used in outputs.""" + address_b58 = self._wallets['main'].get_unused_address() + return P2PKH.create_output_script(decode_address(address_b58)) + + def get_min_timestamp(self, node: DAGNode) -> int: + 
"""Return the minimum timestamp where a node is valid.""" + # update timestamp + deps = list(node.get_all_dependencies()) + assert deps + timestamp = 1 + max(self._vertices[name].timestamp for name in deps) + return timestamp + + def update_vertex_hash(self, vertex: BaseTransaction) -> None: + """Resolve vertex and update its hash.""" + self._vertex_resolver(vertex) + vertex.update_hash() + + def sign_all_inputs(self, node: DAGNode, vertex: Transaction) -> None: + """Sign all inputs of a vertex.""" + data_to_sign = vertex.get_sighash_all() + for txin in vertex.inputs: + pi = self._vertice_per_id[txin.tx_id] + txout = pi.outputs[txin.index] + p2pkh = P2PKH.parse_script(txout.script) + assert p2pkh is not None + + for wallet_name, wallet in self._wallets.items(): + try: + private_key = wallet.get_private_key(p2pkh.address) + break + except KeyError: + pass + + public_key_bytes, signature = wallet.get_input_aux_data(data_to_sign, private_key) + txin.data = P2PKH.create_input_data(public_key_bytes, signature) + + def create_vertex_token(self, node: DAGNode) -> TokenCreationTransaction: + """Create a token given a node.""" + block_parents, txs_parents = self._create_vertex_parents(node) + inputs = self._create_vertex_txin(node) + tokens, outputs = self._create_vertex_txout(node, token_creation=True) + + assert len(block_parents) == 0 + assert len(tokens) == 0 + assert node.name != 'HTR' + + vertex = TokenCreationTransaction(parents=txs_parents, inputs=inputs, outputs=outputs) + vertex.token_name = node.name + vertex.token_symbol = node.name + vertex.timestamp = self.get_min_timestamp(node) + self.sign_all_inputs(node, vertex) + if 'weight' in node.attrs: + vertex.weight = float(node.attrs['weight']) + else: + vertex.weight = self._daa.minimum_tx_weight(vertex) + self.update_vertex_hash(vertex) + return vertex + + def create_vertex_block(self, node: DAGNode) -> Block: + """Create a Block given a node.""" + block_parents, txs_parents = self._create_vertex_parents(node) + 
inputs = self._create_vertex_txin(node) + tokens, outputs = self._create_vertex_txout(node) + + assert len(inputs) == 0 + assert len(block_parents) == 1 + assert len(txs_parents) == 2 + + height = 1 + self._block_height[block_parents[0]] + + parents = block_parents + txs_parents + + blk = Block(parents=parents, outputs=outputs) + blk.timestamp = self.get_min_timestamp(node) + self._settings.AVG_TIME_BETWEEN_BLOCKS + blk.get_height = lambda: height # type: ignore[method-assign] + blk.update_hash() # the next call fails is blk.hash is None + if 'weight' in node.attrs: + blk.weight = float(node.attrs['weight']) + else: + blk.weight = self._daa.calculate_block_difficulty(blk, self.get_parent_block) + self.update_vertex_hash(blk) + self._block_height[blk.hash] = height + return blk + + def create_vertex_transaction(self, node: DAGNode) -> Transaction: + """Create a Transaction given a node.""" + block_parents, txs_parents = self._create_vertex_parents(node) + inputs = self._create_vertex_txin(node) + tokens, outputs = self._create_vertex_txout(node) + + assert len(block_parents) == 0 + tx = Transaction(parents=txs_parents, inputs=inputs, outputs=outputs, tokens=tokens) + tx.timestamp = self.get_min_timestamp(node) + self.sign_all_inputs(node, tx) + if 'weight' in node.attrs: + tx.weight = float(node.attrs['weight']) + else: + tx.weight = self._daa.minimum_tx_weight(tx) + self.update_vertex_hash(tx) + return tx + + def create_genesis_vertex(self, node: DAGNode) -> BaseTransaction: + """Create a genesis vertex given a node.""" + vertex: BaseTransaction + + if node.name == 'genesis_block': + vertex = Block() + vertex.hash = self._settings.GENESIS_BLOCK_HASH + vertex.timestamp = self._settings.GENESIS_BLOCK_TIMESTAMP + txout = TxOutput( + value=self._settings.GENESIS_TOKENS, + token_data=0, + script=self._settings.GENESIS_OUTPUT_SCRIPT + ) + vertex.outputs.append(txout) + + elif node.name == 'genesis_1': + vertex = Transaction() + vertex.hash = 
self._settings.GENESIS_TX1_HASH + vertex.timestamp = self._settings.GENESIS_TX1_TIMESTAMP + + elif node.name == 'genesis_2': + vertex = Transaction() + vertex.hash = self._settings.GENESIS_TX2_HASH + vertex.timestamp = self._settings.GENESIS_TX2_TIMESTAMP + + else: + raise NotImplementedError(node.name) + + return vertex + + def create_vertex(self, node: DAGNode) -> BaseTransaction: + """Create a vertex.""" + vertex: BaseTransaction + + match node.type: + case DAGNodeType.Block: + vertex = self.create_vertex_block(node) + + case DAGNodeType.Token: + vertex = self.create_vertex_token(node) + + case DAGNodeType.Transaction: + vertex = self.create_vertex_transaction(node) + + case DAGNodeType.Genesis: + vertex = self.create_genesis_vertex(node) + + case _: + raise NotImplementedError(node.type) + + assert vertex is not None + self._vertice_per_id[vertex.hash] = vertex + self._vertices[node.name] = vertex + return vertex + + def export(self) -> Iterator[tuple[DAGNode, BaseTransaction]]: + """Yield all pairs (node, vertex).""" + self._block_height[self._settings.GENESIS_BLOCK_HASH] = 0 + + vertex: BaseTransaction | None + + for node in self._builder.topological_sorting(): + vertex = self.create_vertex(node) + if node.type is not DAGNodeType.Genesis: + yield node, vertex diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index a6ee50aa9..26e226a7d 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -1069,7 +1069,8 @@ def _construct_genesis_block(self) -> Block: ) block.update_hash() - assert block.hash == self._settings.GENESIS_BLOCK_HASH + assert block.hash == self._settings.GENESIS_BLOCK_HASH, \ + f'{block.hash.hex()} != {self._settings.GENESIS_BLOCK_HASH.hex()}' return block def _construct_genesis_tx1(self) -> Transaction: diff --git a/tests/dag_builder/__init__.py b/tests/dag_builder/__init__.py new file mode 100644 index 
class DAGCreatorTestCase(unittest.TestCase):
    """Integration tests for the DAG builder DSL.

    Each test describes a DAG in the builder's text DSL, materializes it, and
    feeds the resulting vertices into a real manager via ``on_new_tx``.
    """
    _enable_sync_v1 = False
    _enable_sync_v2 = True

    def setUp(self):
        super().setUp()

        from hathor.simulator.patches import SimulatorCpuMiningService
        from hathor.simulator.simulator import _build_vertex_verifiers

        # use the simulator's cheap mining/verification so tests run fast
        cpu_mining_service = SimulatorCpuMiningService()

        builder = self.get_builder() \
            .set_vertex_verifiers_builder(_build_vertex_verifiers) \
            .set_cpu_mining_service(cpu_mining_service)

        self.manager = self.create_peer_from_builder(builder)
        self.dag_builder = self.get_dag_builder(self.manager)

    def test_one_tx(self) -> None:
        """A single tx spending a block reward, confirmed by a later block."""
        artifacts = self.dag_builder.build_from_str("""
            blockchain genesis b[1..50]
            b1.out[0] <<< tx1
            b30 < tx1  # reward lock
            b40 --> tx1
        """)

        for node, vertex in artifacts.list:
            self.manager.on_new_tx(vertex, fails_silently=False)

        v_order = [node.name for node, _ in artifacts.list]

        tx1 = artifacts.by_name['tx1'].vertex
        b1 = artifacts.by_name['b1'].vertex
        b40 = artifacts.by_name['b40'].vertex

        # blockchain genesis b[1..50]
        self.assertEqual(b1.parents[0], self._settings.GENESIS_BLOCK_HASH)
        for i in range(2, 51):
            prev = artifacts.by_name[f'b{i - 1}'].vertex
            cur = artifacts.by_name[f'b{i}'].vertex
            self.assertEqual(cur.parents[0], prev.hash)

        # b30 < tx1
        self.assertGreater(v_order.index('tx1'), v_order.index('b30'))

        # b1.out[0] <<< tx1
        self.assertEqual(tx1.inputs[0].tx_id, b1.hash)

        # b40 --> tx1
        self.assertEqual(tx1.get_metadata().first_block, b40.hash)

    def test_weight(self) -> None:
        """Explicit per-vertex weights from the DSL are honored."""
        artifacts = self.dag_builder.build_from_str("""
            blockchain genesis b[1..50]
            blockchain b37 c[1..1]
            b30 < dummy
            b50 < c1

            tx1.out[0] = 1 TKA

            TKA.weight = 31.8
            tx1.weight = 25.2
            c1.weight = 80.6
        """)

        for node, vertex in artifacts.list:
            self.manager.on_new_tx(vertex, fails_silently=False)

        tx1 = artifacts.by_name['tx1'].vertex
        tka = artifacts.by_name['TKA'].vertex
        c1 = artifacts.by_name['c1'].vertex
        b38 = artifacts.by_name['b38'].vertex

        self.assertAlmostEqual(tka.weight, 31.8)
        self.assertAlmostEqual(tx1.weight, 25.2)
        self.assertAlmostEqual(c1.weight, 80.6)
        # c1's huge weight forks the chain at b37, voiding b38
        self.assertIsNotNone(b38.get_metadata().voided_by, b38)

    def test_spend_unspecified_utxo(self) -> None:
        """Spending an output never declared in the DSL triggers the filler."""
        artifacts = self.dag_builder.build_from_str("""
            blockchain genesis b[1..50]
            b30 < dummy
            tx1.out[0] <<< tx2
        """)

        for node, vertex in artifacts.list:
            self.manager.on_new_tx(vertex, fails_silently=False)

        tx1 = artifacts.by_name['tx1'].vertex
        self.assertEqual(len(tx1.outputs), 1)
        # the default filler fills unspecified utxos with 1 HTR
        self.assertEqual(tx1.outputs[0].value, 1)
        self.assertEqual(tx1.outputs[0].token_data, 0)

    def test_block_parents(self) -> None:
        """Blocks confirm newly-arrived txs; otherwise reuse the previous tips."""
        artifacts = self.dag_builder.build_from_str("""
            blockchain genesis b[1..50]
            b30 < dummy

            b32 --> tx1

            b34 --> tx2

            b36 --> tx3
            b36 --> tx4
        """)

        for node, vertex in artifacts.list:
            self.manager.on_new_tx(vertex, fails_silently=False)

        b0 = artifacts.by_name['b30'].vertex
        b1 = artifacts.by_name['b31'].vertex
        b2 = artifacts.by_name['b32'].vertex
        b3 = artifacts.by_name['b33'].vertex
        b4 = artifacts.by_name['b34'].vertex
        b5 = artifacts.by_name['b35'].vertex
        b6 = artifacts.by_name['b36'].vertex
        b7 = artifacts.by_name['b37'].vertex

        tx1 = artifacts.by_name['tx1'].vertex
        tx2 = artifacts.by_name['tx2'].vertex
        tx3 = artifacts.by_name['tx3'].vertex
        tx4 = artifacts.by_name['tx4'].vertex

        # parents[0] is always the previous block in the chain
        self.assertEqual(b2.parents[0], b1.hash)
        self.assertEqual(b3.parents[0], b2.hash)
        self.assertEqual(b4.parents[0], b3.hash)
        self.assertEqual(b5.parents[0], b4.hash)
        self.assertEqual(b6.parents[0], b5.hash)

        # blocks with no new txs copy their predecessor's tx parents
        self.assertEqual(set(b1.parents[1:]), set(b0.parents[1:]))
        self.assertEqual(set(b3.parents[1:]), set(b2.parents[1:]))
        self.assertEqual(set(b5.parents[1:]), set(b4.parents[1:]))
        self.assertEqual(set(b7.parents[1:]), set(b6.parents[1:]))

        self.assertTrue(set(b2.parents[1:]).issubset([tx1.hash] + b1.parents[1:]))
        self.assertTrue(set(b4.parents[1:]).issubset([tx2.hash] + b3.parents[1:]))
        self.assertEqual(set(b6.parents[1:]), {tx3.hash, tx4.hash})

    def test_custom_token(self) -> None:
        """A custom token referenced by the DSL yields a TokenCreationTransaction."""
        artifacts = self.dag_builder.build_from_str("""
            blockchain genesis b[1..50]
            b1.out[0] <<< tx1
            tx1.out[1] = 100 TKA
            b30 < tx1  # reward lock
            b30 < dummy  # reward lock
            b40 --> tx1
        """)

        for node, vertex in artifacts.list:
            self.manager.on_new_tx(vertex, fails_silently=False)

        tka = artifacts.by_name['TKA'].vertex
        tx1 = artifacts.by_name['tx1'].vertex

        # TKA token creation transaction
        self.assertIsInstance(tka, TokenCreationTransaction)
        self.assertEqual(tka.token_name, 'TKA')
        self.assertEqual(tka.token_symbol, 'TKA')

        # tx1.out[1] = 100 TKA
        self.assertEqual(tx1.outputs[1].value, 100)
        self.assertEqual(tx1.get_token_uid(tx1.outputs[1].token_data), tka.hash)

    def test_big_dag(self) -> None:
        """Smoke test: a larger DAG with forks, tokens, wallets, and ordering constraints."""
        artifacts = self.dag_builder.build_from_str("""
            blockchain genesis a[0..30]
            blockchain a30 b[0..20]
            blockchain b4 c[0..10]

            a30 < dummy

            b11 --> tx1
            b11 --> tx2

            b14 --> tx1
            b14 --> tx3

            c3 --> tx1
            c3 --> tx2

            tx1 <-- tx2 <-- tx3

            tx3 --> tx5 --> tx6

            tx1.out[0] <<< tx2 tx3
            tx1.out[0] <<< tx4

            a0.out[0] <<< tx1

            tx1.out[0] = 100 HTR [wallet1]
            tx1.out[1] = 50 TK1 [wallet2]
            tx2.out[0] = 75 USDC [wallet1]

            USDC.out[0] = 100000 HTR

            b5 < c0 < c10 < b20
            b6 < tx3
            b16 < tx4
        """)

        for node, vertex in artifacts.list:
            self.manager.on_new_tx(vertex, fails_silently=False)
a/tests/event/test_event_simulation_scenarios.py b/tests/event/test_event_simulation_scenarios.py index 2b7e413b1..0b8e0ed3b 100644 --- a/tests/event/test_event_simulation_scenarios.py +++ b/tests/event/test_event_simulation_scenarios.py @@ -43,6 +43,25 @@ class BaseEventSimulationScenariosTest(BaseEventSimulationTester): seed_config = 6946502462188444706 + def assert_response_equal(self, responses: list[EventResponse], expected: list[EventResponse]) -> None: + """Compare responses and expected responses. + """ + self.assertEqual(len(responses), len(expected)) + + for a, b in zip(responses, expected): + self.assertEqual(type(a), type(b)) + self.assertEqual(a.__fields__, b.__fields__) + self.assertEqual(a.event.__fields__, b.event.__fields__) + self.assertEqual(a.event.data.__fields__, b.event.data.__fields__) + + for field in ['type', 'peer_id', 'network', 'latest_event_id', 'stream_id']: + self.assertEqual(getattr(a, field), getattr(b, field)) + + for field in ['id', 'type', 'group_id']: + self.assertEqual(getattr(a.event, field), getattr(b.event, field)) + + self.assertEqual(type(a.event.data), type(b.event.data)) + def test_only_load(self) -> None: stream_id = self.manager._event_manager._stream_id assert stream_id is not None @@ -62,9 +81,7 @@ def test_only_load(self) -> None: EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=4, stream_id=stream_id) # noqa: E501 ] - responses = _remove_timestamp(responses) - expected = _remove_timestamp(expected) - assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + self.assert_response_equal(responses, expected) def test_single_chain_one_block(self) -> None: stream_id = self.manager._event_manager._stream_id @@ -91,9 +108,7 @@ def test_single_chain_one_block(self) -> None: EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', 
event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id) # noqa: E501 ] - responses = _remove_timestamp(responses) - expected = _remove_timestamp(expected) - assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + self.assert_response_equal(responses, expected) def test_single_chain_blocks_and_transactions(self) -> None: stream_id = self.manager._event_manager._stream_id @@ -157,9 +172,7 @@ def test_single_chain_blocks_and_transactions(self) -> None: EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=1578879091.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', 
timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585834390443, accumulated_weight_raw="256", score_raw="781879", first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id) # noqa: E501 ] - responses = _remove_timestamp(responses) - expected = _remove_timestamp(expected) - assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + self.assert_response_equal(responses, expected) def test_reorg(self) -> None: stream_id = self.manager._event_manager._stream_id @@ -206,9 +219,7 @@ def test_reorg(self) -> None: EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578879064.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], 
accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id) # noqa: E501 ] - responses = _remove_timestamp(responses) - expected = _remove_timestamp(expected) - assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + self.assert_response_equal(responses, expected) def test_unvoided_transaction(self) -> None: stream_id = self.manager._event_manager._stream_id @@ -274,9 +285,7 @@ def test_unvoided_transaction(self) -> None: EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, accumulated_weight_raw="256", score_raw="524600", first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 ] - responses = _remove_timestamp(responses) - expected = _remove_timestamp(expected) - assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + self.assert_response_equal(responses, expected) def 
test_invalid_mempool(self) -> None: stream_id = self.manager._event_manager._stream_id @@ -347,9 +356,7 @@ def test_invalid_mempool(self) -> None: EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=41, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', nonce=0, timestamp=1578879030, signal_bits=0, version=0, weight=10.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='', decoded=None)], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=10.0, score=10.066089190457772, accumulated_weight_raw="1024", score_raw="1072", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id) # noqa: E501 ] - responses = _remove_timestamp(responses) - expected = _remove_timestamp(expected) - assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + self.assert_response_equal(responses, expected) def test_empty_script(self) -> None: stream_id = self.manager._event_manager._stream_id @@ -412,9 +419,7 @@ def test_empty_script(self) -> None: # One NEW_VERTEX_ACCEPTED for a new block EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, 
script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 'ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.691576556156242, accumulated_weight_raw="256", score_raw="423375", first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id)] # noqa: E501 - responses = _remove_timestamp(responses) - expected = _remove_timestamp(expected) - assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + self.assert_response_equal(responses, expected) def test_custom_script(self) -> None: stream_id = self.manager._event_manager._stream_id @@ -478,9 +483,7 @@ def test_custom_script(self) -> None: EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', '3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', 'cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, 
metadata=TxMetadata(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.79789262729119, accumulated_weight_raw="256", score_raw="455753", first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id) # noqa: E501 ] - responses = _remove_timestamp(responses) - expected = _remove_timestamp(expected) - assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + self.assert_response_equal(responses, expected) def _start_stream(self) -> None: start_stream = StartStreamRequest(type='START_STREAM', window_size=1_000_000, last_ack_event_id=None) diff --git a/tests/tx/test_genesis.py b/tests/tx/test_genesis.py index f2839bf6b..dbb96b8f7 100644 --- a/tests/tx/test_genesis.py +++ b/tests/tx/test_genesis.py @@ -20,7 +20,7 @@ def get_genesis_output(): elif settings.NETWORK_NAME.startswith('testnet'): address = 'WdmDUMp8KvzhWB7KLgguA2wBiKsh4Ha8eX' elif settings.NETWORK_NAME == 'unittests': - address = 'HVayMofEDh4XGsaQJeRJKhutYxYodYNop6' + address = 'HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ' else: raise ValueError('Network unknown.') diff --git a/tests/unittest.py b/tests/unittest.py index afb11c1b0..94cef1c34 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -14,6 +14,7 @@ from hathor.conf.get_settings import get_global_settings from hathor.conf.settings import HathorSettings from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode +from hathor.dag_builder import DAGBuilder from hathor.event import EventManager from hathor.event.storage import EventStorage from hathor.manager import HathorManager @@ -30,6 +31,7 @@ from hathor.util import Random, not_none from hathor.wallet import BaseWallet, HDWallet, Wallet from tests.test_memory_reactor_clock import TestMemoryReactorClock +from tests.utils import GENESIS_SEED logger = get_logger() main = ut_main @@ -170,6 +172,18 
@@ def _create_test_wallet(self, unlocked: bool = False) -> Wallet: wallet.lock() return wallet + def get_dag_builder(self, manager: HathorManager) -> DAGBuilder: + genesis_wallet = HDWallet(words=GENESIS_SEED) + genesis_wallet._manually_initialize() + + return DAGBuilder( + settings=manager._settings, + daa=manager.daa, + genesis_wallet=genesis_wallet, + wallet_factory=self.get_wallet, + vertex_resolver=lambda x: manager.cpu_mining_service.resolve(x), + ) + def get_builder(self) -> TestBuilder: builder = TestBuilder() builder.set_rng(self.rng) \ diff --git a/tests/utils.py b/tests/utils.py index a2566a3ca..67d98587b 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -8,12 +8,13 @@ from typing import Any, Optional import requests +from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import ec from hathorlib.scripts import DataScript from twisted.internet.task import Clock from hathor.conf import HathorSettings -from hathor.crypto.util import decode_address, get_address_b58_from_public_key, get_private_key_from_bytes +from hathor.crypto.util import decode_address, get_address_b58_from_public_key from hathor.event.model.base_event import BaseEvent from hathor.event.model.event_data import TxData, TxMetadata from hathor.event.model.event_type import EventType @@ -349,14 +350,20 @@ def execute_tx_gen( execute(args) -def get_genesis_key() -> ec.EllipticCurvePrivateKeyWithSerialization: - private_key_bytes = base64.b64decode( - 'MIGEAgEAMBAGByqGSM49AgEGBSuBBAAKBG0wawIBAQQgOCgCddzDZsfKgiMJLOt97eov9RLwHeePyBIK2WPF8MChRA' - 'NCAAQ/XSOK+qniIY0F3X+lDrb55VQx5jWeBLhhzZnH6IzGVTtlAj9Ki73DVBm5+VXK400Idd6ddzS7FahBYYC7IaTl' +def get_genesis_key() -> ec.EllipticCurvePrivateKey: + from hathor.wallet import HDWallet + wallet = HDWallet(words=GENESIS_SEED) + wallet._manually_initialize() + key = wallet.get_key_at_index(0) + return ec.derive_private_key( + int.from_bytes(key.secret_exponent().to_bytes(32, 'big'), 'big'), + 
ec.SECP256K1(), + backend=default_backend() ) - return get_private_key_from_bytes(private_key_bytes) +GENESIS_SEED = ('coral light army gather adapt blossom school alcohol coral light army gather ' + 'adapt blossom school alcohol coral light army gather adapt blossom school awesome') GENESIS_PRIVATE_KEY = get_genesis_key() GENESIS_PUBLIC_KEY = GENESIS_PRIVATE_KEY.public_key() GENESIS_ADDRESS_B58 = get_address_b58_from_public_key(GENESIS_PUBLIC_KEY)