Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 24 additions & 2 deletions hathor/cli/load_from_logs.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import re
import sys
from argparse import ArgumentParser, FileType

Expand Down Expand Up @@ -39,17 +40,38 @@ async def _load_from_logs(self) -> None:
settings = get_global_settings()
parser = VertexParser(settings=settings)

last_comment = ''
labels = {}

while True:
line_with_break = self._args.log_dump.readline()
if not line_with_break:
break
if line_with_break.startswith('//'):
continue
line = line_with_break.strip()
if not line:
continue
if line.startswith('//'):
last_comment = line[2:].strip()
continue
vertex_bytes = bytes.fromhex(line)
vertex = parser.deserialize(vertex_bytes)
labels[vertex.hash] = last_comment
await deferLater(self.reactor, 0, self.manager.on_new_tx, vertex)

print('---> graphviz')
from hathor.graphviz import GraphvizVisualizer
tx_storage = self.manager.tx_storage
graphviz = GraphvizVisualizer(tx_storage, include_verifications=True, include_funds=True, only_with_labels=True)
graphviz.labels[self.manager._settings.GENESIS_BLOCK_HASH] = 'g_block'
graphviz.labels[self.manager._settings.GENESIS_TX1_HASH] = 'g_tx1'
graphviz.labels[self.manager._settings.GENESIS_TX2_HASH] = 'g_tx2'
for k, v in labels.items():
if re.match(r'^a[0-9]+$', v):
continue
graphviz.labels[k] = v
dot = graphviz.dot()
dot.render('dot0')

self.manager.connections.disconnect_all_peers(force=True)
self.reactor.fireSystemEvent('shutdown')

Expand Down
2 changes: 1 addition & 1 deletion hathor/daa.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def __init__(self, *, settings: HathorSettings, test_mode: TestMode = TestMode.D
self.TEST_MODE = test_mode
DifficultyAdjustmentAlgorithm.singleton = self

@cpu.profiler(key=lambda _, block: 'calculate_block_difficulty!{}'.format(block.hash.hex()))
@cpu.profiler(key=lambda _, block: 'calculate_block_difficulty!{}'.format(block.hash.hex() if block._hash else None))
def calculate_block_difficulty(self, block: 'Block', parent_block_getter: Callable[['Block'], 'Block']) -> float:
""" Calculate block weight according to the ascendants of `block`, using calculate_next_weight."""
if self.TEST_MODE & TestMode.TEST_BLOCK_WEIGHT:
Expand Down
Empty file.
178 changes: 178 additions & 0 deletions hathor/dag_builder/builder.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,178 @@
from __future__ import annotations

from collections import defaultdict
from dataclasses import dataclass, field
from typing import Any, Iterator, NamedTuple

from hathor.dag_builder.tokenizer import Token

class DAGBuilder:
def __init__(self):
self._nodes = {}

def parse_tokens(self, tokens: Iterator) -> None:
for parts in tokens:
match parts:
case (Token.PARENT, _from, _to):
self.add_parent_edge(_from, _to)

case (Token.SPEND, _from, _to, _txout_index):
self.add_spending_edge(_from, _to, _txout_index)

case (Token.ATTRIBUTE, name, key, value):
self.add_attribute(name, key, value)

case (Token.ORDER_BEFORE, _from, _to):
self.add_deps(_from, _to)

case (Token.OUTPUT, name, index, amount, token, attrs):
self.set_output(name, index, amount, token, attrs)

case (Token.BLOCKCHAIN, name, first_parent, begin_index, end_index):
self.add_blockchain(name, first_parent, begin_index, end_index)

case _:
raise NotImplementedError(parts)

def _get_node(self, name, *, default_type='unknown'):
if name not in self._nodes:
self._nodes[name] = DAGNode(name=name, type=default_type)
node = self._nodes[name]
# TODO Set type if unknown.
return node

def add_deps(self, _from, _to):
from_node = self._get_node(_from)
self._get_node(_to)
from_node.deps.add(_to)

def add_blockchain(self, prefix: str, first_parent: str | None, first_index: int, last_index: int):
prev = first_parent
for i in range(first_index, last_index + 1):
name = f'{prefix}{i}'
self._get_node(name, default_type='block')
if prev is not None:
self.add_parent_edge(name, prev)
prev = name

def add_parent_edge(self, _from, _to):
self._get_node(_to, default_type='transaction')
from_node = self._get_node(_from, default_type='transaction')
from_node.parents.add(_to)

def add_spending_edge(self, _from, _to, _txout_index):
self._get_node(_to, default_type='transaction')
from_node = self._get_node(_from, default_type='transaction')
from_node.inputs.add(DAGInput(_to, _txout_index))

def set_output(self, name, index, amount, token, attrs):
node = self._get_node(name)
if len(node.outputs) <= index:
node.outputs.extend([None] * (index - len(node.outputs) + 1))
node.outputs[index] = DAGOutput(amount, token, attrs)
if token != 'HTR':
self._get_node(token, default_type='token')
node.deps.add(token)

def add_attribute(self, name, key, value):
node = self._get_node(name)
node.attrs[key] = value

def topological_sorting(self) -> Iterator[DAGNode]:
direct_deps: dict[str, str[str]] = {}
rev_deps: dict[str, set[str]] = defaultdict(set)
seen: set[str] = set()
candidates: list[str] = []
for name, node in self._nodes.items():
assert name == node.name
deps = set(node.get_all_dependencies())
assert name not in direct_deps
direct_deps[name] = deps
for x in deps:
rev_deps[x].add(name)
if len(deps) == 0:
candidates.append(name)

for _ in range(len(self._nodes)):
if len(candidates) == 0:
# TODO improve error message showing at least one cycle
print()
print('direct_deps', direct_deps)
print()
print('rev_deps', rev_deps)
print()
print('seen', seen)
print()
print('not_seen', set(self._nodes.keys()) - seen)
print()
print('nodes')
for node in self._nodes.values():
print(node)
print()
raise RuntimeError('there is at least one cycle')
name = candidates.pop()
assert name not in seen
seen.add(name)
for d in rev_deps[name]:
direct_deps[d].remove(name)
if len(direct_deps[d]) == 0:
candidates.append(d)
del direct_deps[d]
node = self._get_node(name)
yield node

def build(self, tokenizer, settings, daa, genesis_wallet, wallet_factory) -> Iterator[tuple[str, 'BaseTransaction']]:
from hathor.dag_builder.default_filler import DefaultFiller
from hathor.dag_builder.vertex_exporter import VertexExporter

filler = DefaultFiller(self, settings, daa)

exporter = VertexExporter(self)
exporter.set_genesis_wallet(genesis_wallet)
exporter.set_wallet_factory(wallet_factory)
exporter.set_daa(daa)
exporter.set_settings(settings)

self._get_node('dummy', default_type='transaction')

self.parse_tokens(tokenizer)

for node in self._nodes.values():
if node.type == 'block':
continue
if node.type == 'genesis':
continue
if node.name == 'dummy':
continue
self.add_deps(node.name, 'dummy')

filler.run()
return exporter.export()


@dataclass(frozen=True)
class DAGNode:
    """A vertex-to-be in the DAG under construction.

    Only `name` and `type` are immutable; the container fields are mutated
    in place while the builder wires up edges, outputs, and attributes.
    """
    name: str
    type: str

    attrs: dict[str, str] = field(default_factory=dict)
    inputs: set[DAGInput] = field(default_factory=set)
    outputs: list[DAGOutput] = field(default_factory=list)
    parents: set[str] = field(default_factory=set)
    deps: set[str] = field(default_factory=set)

    def get_all_dependencies(self):
        """Iterate over the names of every node this node depends on."""
        for parent_name in self.parents:
            yield parent_name
        for spent_name, _index in self.inputs:
            yield spent_name
        for dep_name in self.deps:
            yield dep_name


class DAGInput(NamedTuple):
    """A spending edge: consumes output number `index` of the node named `node_name`."""
    node_name: str
    index: int


class DAGOutput(NamedTuple):
    """One output of a node: `amount` of `token`, plus extra (key, value) attributes."""
    amount: int
    token: str
    attrs: list[tuple[str, Any]]
45 changes: 45 additions & 0 deletions hathor/dag_builder/cli.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
from hathor.dag_builder.builder import DAGBuilder
from hathor.dag_builder.tokenizer import parse_file


def main(filename, genesis_seed):
    """Build the DAG described in `filename` and print each vertex as hex."""
    from hathor.reactor import initialize_global_reactor

    # The global reactor must be initialized before the other hathor imports.
    _ = initialize_global_reactor(use_asyncio_reactor=False)

    from hathor.conf.get_settings import get_global_settings
    from hathor.daa import DifficultyAdjustmentAlgorithm
    from hathor.wallet import HDWallet

    settings = get_global_settings()

    default_words = ('bind daring above film health blush during tiny neck slight clown salmon '
                     'wine brown good setup later omit jaguar tourist rescue flip pet salute')

    def wallet_factory(words=None):
        # Fall back to a fixed seed so unseeded wallets are reproducible.
        wallet = HDWallet(words=words if words is not None else default_words)
        wallet._manually_initialize()
        return wallet

    genesis_wallet = wallet_factory(genesis_seed)
    daa = DifficultyAdjustmentAlgorithm(settings=settings)

    builder = DAGBuilder()
    tokens = parse_file(filename)
    for node, vertex in builder.build(tokens, settings, daa, genesis_wallet, wallet_factory):
        print('//', node)
        print('//', repr(vertex))
        print('//', node.name)
        print(bytes(vertex).hex())
        print()
print()


if __name__ == '__main__':
    import os
    import sys

    # Default to the testnet config unless the caller already chose one.
    os.environ.setdefault('HATHOR_CONFIG_YAML', './hathor/conf/testnet.yml')
    # GENESIS_SEED is mandatory: fail fast with a KeyError when it is unset.
    genesis_seed = os.environ['GENESIS_SEED']
    main(sys.argv[1], genesis_seed)
Loading