diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index acc739962..7527712e6 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -8,6 +8,9 @@ on: # yamllint disable-line rule:truthy tags: - v* pull_request: +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true jobs: matrix: runs-on: ubuntu-latest diff --git a/extras/custom_checks.sh b/extras/custom_checks.sh index 9ae088714..ece887832 100644 --- a/extras/custom_checks.sh +++ b/extras/custom_checks.sh @@ -72,7 +72,7 @@ function check_do_not_use_builtin_random_in_tests() { } function check_deprecated_typing() { - if grep -R '\' "${SOURCE_DIRS[@]}"; then + if grep -RI '\' "${SOURCE_DIRS[@]}"; then echo 'do not use typing.List/Tuple/Dict/... for type annotations use builtin list/tuple/dict/... instead' echo 'for more info check the PEP 585 doc: https://peps.python.org/pep-0585/' return 1 @@ -80,11 +80,47 @@ function check_deprecated_typing() { return 0 } +function check_do_not_import_tests_in_hathor() { + if grep -R '\<.*import .*tests.*\>\|\<.*from .*tests.* import\>' "hathor"; then + echo 'do not import test definitions in the hathor module' + echo 'move them from tests to hathor instead' + return 1 + fi + return 0 +} + +function check_do_not_import_from_hathor_in_entrypoints() { + PATTERN='^import .*hathor.*\|^from .*hathor.* import' + + if grep -R "$PATTERN" "hathor/cli" | grep -v 'from hathor.cli.run_node import RunNode' | grep -v '# skip-cli-import-custom-check'; then + echo 'do not import from `hathor` in the module-level of a CLI entrypoint.' + echo 'instead, import locally inside the function that uses the import.' + echo 'alternatively, comment `# skip-cli-import-custom-check` to exclude a line.' 
+ return 1 + fi + return 0 +} + +function check_do_not_import_twisted_reactor_directly() { + EXCLUDES="--exclude=reactor.py --exclude=conftest.py" + PATTERN='\<.*from .*twisted.internet import .*reactor\>' + + if grep -R $EXCLUDES "$PATTERN" "${SOURCE_DIRS[@]}"; then + echo 'do not use `from twisted.internet import reactor` directly.' + echo 'instead, use `hathor.reactor.get_global_reactor()`.' + return 1 + fi + return 0 +} + # List of functions to be executed checks=( check_version_match check_do_not_use_builtin_random_in_tests check_deprecated_typing + check_do_not_import_tests_in_hathor + check_do_not_import_from_hathor_in_entrypoints + check_do_not_import_twisted_reactor_directly ) # Initialize a variable to track if any check fails diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index 37bfcb8fe..d8ee85522 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -13,7 +13,7 @@ # limitations under the License. from enum import Enum -from typing import Any, NamedTuple, Optional +from typing import Any, Callable, NamedTuple, Optional, TypeAlias from structlog import get_logger @@ -21,6 +21,7 @@ from hathor.conf.get_settings import get_settings from hathor.conf.settings import HathorSettings as HathorSettingsType from hathor.consensus import ConsensusAlgorithm +from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.event import EventManager from hathor.event.storage import EventMemoryStorage, EventRocksDBStorage, EventStorage from hathor.event.websocket import EventWebsocketFactory @@ -29,9 +30,11 @@ from hathor.feature_activation.feature_service import FeatureService from hathor.indexes import IndexesManager, MemoryIndexesManager, RocksDBIndexesManager from hathor.manager import HathorManager +from hathor.mining.cpu_mining_service import CpuMiningService from hathor.p2p.manager import ConnectionsManager from hathor.p2p.peer_id import PeerId from hathor.pubsub import PubSubManager +from hathor.reactor import 
ReactorProtocol as Reactor from hathor.storage import RocksDBStorage from hathor.stratum import StratumFactory from hathor.transaction.storage import ( @@ -40,7 +43,7 @@ TransactionRocksDBStorage, TransactionStorage, ) -from hathor.util import Random, Reactor, get_environment_info +from hathor.util import Random, get_environment_info, not_none from hathor.verification.verification_service import VerificationService, VertexVerifiers from hathor.wallet import BaseWallet, Wallet @@ -63,12 +66,19 @@ class BuildArtifacts(NamedTuple): pubsub: PubSubManager consensus: ConsensusAlgorithm tx_storage: TransactionStorage + feature_service: FeatureService indexes: Optional[IndexesManager] wallet: Optional[BaseWallet] rocksdb_storage: Optional[RocksDBStorage] stratum_factory: Optional[StratumFactory] +_VertexVerifiersBuilder: TypeAlias = Callable[ + [HathorSettingsType, DifficultyAdjustmentAlgorithm, FeatureService], + VertexVerifiers +] + + class Builder: """Builder builds the core objects to run a full node. 
@@ -102,7 +112,11 @@ def __init__(self) -> None: self._feature_service: Optional[FeatureService] = None self._bit_signaling_service: Optional[BitSignalingService] = None + self._daa: Optional[DifficultyAdjustmentAlgorithm] = None + self._cpu_mining_service: Optional[CpuMiningService] = None + self._vertex_verifiers: Optional[VertexVerifiers] = None + self._vertex_verifiers_builder: _VertexVerifiersBuilder | None = None self._verification_service: Optional[VerificationService] = None self._rocksdb_path: Optional[str] = None @@ -127,8 +141,7 @@ def __init__(self) -> None: self._enable_tokens_index: bool = False self._enable_utxo_index: bool = False - self._enable_sync_v1: bool = False - self._enable_sync_v1_1: bool = True + self._enable_sync_v1: bool = True self._enable_sync_v2: bool = False self._enable_stratum_server: Optional[bool] = None @@ -158,10 +171,12 @@ def build(self) -> BuildArtifacts: wallet = self._get_or_create_wallet() event_manager = self._get_or_create_event_manager() indexes = self._get_or_create_indexes_manager() - tx_storage = self._get_or_create_tx_storage(indexes) - feature_service = self._get_or_create_feature_service(tx_storage) - bit_signaling_service = self._get_or_create_bit_signaling_service(tx_storage) + tx_storage = self._get_or_create_tx_storage() + feature_service = self._get_or_create_feature_service() + bit_signaling_service = self._get_or_create_bit_signaling_service() verification_service = self._get_or_create_verification_service() + daa = self._get_or_create_daa() + cpu_mining_service = self._get_or_create_cpu_mining_service() if self._enable_address_index: indexes.enable_address_index(pubsub) @@ -186,6 +201,7 @@ def build(self) -> BuildArtifacts: network=self._network, pubsub=pubsub, consensus_algorithm=consensus_algorithm, + daa=daa, peer_id=peer_id, tx_storage=tx_storage, p2p_manager=p2p_manager, @@ -198,6 +214,7 @@ def build(self) -> BuildArtifacts: feature_service=feature_service, 
bit_signaling_service=bit_signaling_service, verification_service=verification_service, + cpu_mining_service=cpu_mining_service, **kwargs ) @@ -221,6 +238,7 @@ def build(self) -> BuildArtifacts: wallet=wallet, rocksdb_storage=self._rocksdb_storage, stratum_factory=stratum_factory, + feature_service=feature_service, ) return self.artifacts @@ -265,6 +283,7 @@ def set_peer_id(self, peer_id: PeerId) -> 'Builder': return self def _get_or_create_settings(self) -> HathorSettingsType: + """Return the HathorSettings instance set on this builder, or a new one if not set.""" if self._settings is None: self._settings = get_settings() return self._settings @@ -293,7 +312,7 @@ def _get_or_create_pubsub(self) -> PubSubManager: return self._pubsub def _create_stratum_server(self, manager: HathorManager) -> StratumFactory: - stratum_factory = StratumFactory(manager=manager) + stratum_factory = StratumFactory(manager=manager, reactor=self._get_reactor()) manager.stratum_factory = stratum_factory manager.metrics.stratum_factory = stratum_factory return stratum_factory @@ -316,6 +335,10 @@ def _get_or_create_rocksdb_storage(self) -> RocksDBStorage: return self._rocksdb_storage def _get_p2p_manager(self) -> ConnectionsManager: + from hathor.p2p.sync_v1.factory import SyncV11Factory + from hathor.p2p.sync_v2.factory import SyncV2Factory + from hathor.p2p.sync_version import SyncVersion + enable_ssl = True reactor = self._get_reactor() my_peer = self._get_peer_id() @@ -330,10 +353,13 @@ def _get_p2p_manager(self) -> ConnectionsManager: ssl=enable_ssl, whitelist_only=False, rng=self._rng, - enable_sync_v1=self._enable_sync_v1, - enable_sync_v1_1=self._enable_sync_v1_1, - enable_sync_v2=self._enable_sync_v2, ) + p2p_manager.add_sync_factory(SyncVersion.V1_1, SyncV11Factory(p2p_manager)) + p2p_manager.add_sync_factory(SyncVersion.V2, SyncV2Factory(p2p_manager)) + if self._enable_sync_v1: + p2p_manager.enable_sync_version(SyncVersion.V1_1) + if self._enable_sync_v2: + 
p2p_manager.enable_sync_version(SyncVersion.V2) return p2p_manager def _get_or_create_indexes_manager(self) -> IndexesManager: @@ -352,7 +378,9 @@ def _get_or_create_indexes_manager(self) -> IndexesManager: return self._indexes_manager - def _get_or_create_tx_storage(self, indexes: IndexesManager) -> TransactionStorage: + def _get_or_create_tx_storage(self) -> TransactionStorage: + indexes = self._get_or_create_indexes_manager() + if self._tx_storage is not None: # If a tx storage is provided, set the indexes manager to it. self._tx_storage.indexes = indexes @@ -396,9 +424,16 @@ def _get_or_create_event_storage(self) -> EventStorage: def _get_or_create_event_manager(self) -> EventManager: if self._event_manager is None: + peer_id = self._get_peer_id() + settings = self._get_or_create_settings() reactor = self._get_reactor() storage = self._get_or_create_event_storage() - factory = EventWebsocketFactory(reactor, storage) + factory = EventWebsocketFactory( + peer_id=not_none(peer_id.id), + network=settings.NETWORK_NAME, + reactor=reactor, + event_storage=storage, + ) self._event_manager = EventManager( reactor=reactor, pubsub=self._get_or_create_pubsub(), @@ -408,9 +443,11 @@ def _get_or_create_event_manager(self) -> EventManager: return self._event_manager - def _get_or_create_feature_service(self, tx_storage: TransactionStorage) -> FeatureService: + def _get_or_create_feature_service(self) -> FeatureService: + """Return the FeatureService instance set on this builder, or a new one if not set.""" if self._feature_service is None: settings = self._get_or_create_settings() + tx_storage = self._get_or_create_tx_storage() self._feature_service = FeatureService( feature_settings=settings.FEATURE_ACTIVATION, tx_storage=tx_storage @@ -418,12 +455,14 @@ def _get_or_create_feature_service(self, tx_storage: TransactionStorage) -> Feat return self._feature_service - def _get_or_create_bit_signaling_service(self, tx_storage: TransactionStorage) -> BitSignalingService: + def 
_get_or_create_bit_signaling_service(self) -> BitSignalingService: if self._bit_signaling_service is None: settings = self._get_or_create_settings() + tx_storage = self._get_or_create_tx_storage() + feature_service = self._get_or_create_feature_service() self._bit_signaling_service = BitSignalingService( feature_settings=settings.FEATURE_ACTIVATION, - feature_service=self._get_or_create_feature_service(tx_storage), + feature_service=feature_service, tx_storage=tx_storage, support_features=self._support_features, not_support_features=self._not_support_features, @@ -441,10 +480,33 @@ def _get_or_create_verification_service(self) -> VerificationService: def _get_or_create_vertex_verifiers(self) -> VertexVerifiers: if self._vertex_verifiers is None: settings = self._get_or_create_settings() - self._vertex_verifiers = VertexVerifiers.create_defaults(settings=settings) + feature_service = self._get_or_create_feature_service() + daa = self._get_or_create_daa() + + if self._vertex_verifiers_builder: + self._vertex_verifiers = self._vertex_verifiers_builder(settings, daa, feature_service) + else: + self._vertex_verifiers = VertexVerifiers.create_defaults( + settings=settings, + daa=daa, + feature_service=feature_service, + ) return self._vertex_verifiers + def _get_or_create_daa(self) -> DifficultyAdjustmentAlgorithm: + if self._daa is None: + settings = self._get_or_create_settings() + self._daa = DifficultyAdjustmentAlgorithm(settings=settings) + + return self._daa + + def _get_or_create_cpu_mining_service(self) -> CpuMiningService: + if self._cpu_mining_service is None: + self._cpu_mining_service = CpuMiningService() + + return self._cpu_mining_service + def use_memory(self) -> 'Builder': self.check_if_can_modify() self._storage_type = StorageType.MEMORY @@ -547,6 +609,21 @@ def set_vertex_verifiers(self, vertex_verifiers: VertexVerifiers) -> 'Builder': self._vertex_verifiers = vertex_verifiers return self + def set_vertex_verifiers_builder(self, builder: 
_VertexVerifiersBuilder) -> 'Builder': + self.check_if_can_modify() + self._vertex_verifiers_builder = builder + return self + + def set_daa(self, daa: DifficultyAdjustmentAlgorithm) -> 'Builder': + self.check_if_can_modify() + self._daa = daa + return self + + def set_cpu_mining_service(self, cpu_mining_service: CpuMiningService) -> 'Builder': + self.check_if_can_modify() + self._cpu_mining_service = cpu_mining_service + return self + def set_reactor(self, reactor: Reactor) -> 'Builder': self.check_if_can_modify() self._reactor = reactor @@ -567,11 +644,6 @@ def set_enable_sync_v1(self, enable_sync_v1: bool) -> 'Builder': self._enable_sync_v1 = enable_sync_v1 return self - def set_enable_sync_v1_1(self, enable_sync_v1_1: bool) -> 'Builder': - self.check_if_can_modify() - self._enable_sync_v1_1 = enable_sync_v1_1 - return self - def set_enable_sync_v2(self, enable_sync_v2: bool) -> 'Builder': self.check_if_can_modify() self._enable_sync_v2 = enable_sync_v2 @@ -587,16 +659,6 @@ def disable_sync_v1(self) -> 'Builder': self._enable_sync_v1 = False return self - def enable_sync_v1_1(self) -> 'Builder': - self.check_if_can_modify() - self._enable_sync_v1_1 = True - return self - - def disable_sync_v1_1(self) -> 'Builder': - self.check_if_can_modify() - self._enable_sync_v1_1 = False - return self - def enable_sync_v2(self) -> 'Builder': self.check_if_can_modify() self._enable_sync_v2 = True diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 58e8d83b4..8d3463176 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -21,21 +21,25 @@ from structlog import get_logger -from hathor.cli.run_node import RunNodeArgs +from hathor.cli.run_node_args import RunNodeArgs from hathor.consensus import ConsensusAlgorithm +from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.event import EventManager from hathor.exception import BuilderError from hathor.feature_activation.bit_signaling_service import BitSignalingService 
from hathor.feature_activation.feature_service import FeatureService from hathor.indexes import IndexesManager, MemoryIndexesManager, RocksDBIndexesManager from hathor.manager import HathorManager +from hathor.mining.cpu_mining_service import CpuMiningService from hathor.p2p.manager import ConnectionsManager from hathor.p2p.peer_id import PeerId from hathor.p2p.utils import discover_hostname, get_genesis_short_hash from hathor.pubsub import PubSubManager +from hathor.reactor import ReactorProtocol as Reactor from hathor.stratum import StratumFactory -from hathor.util import Random, Reactor -from hathor.verification.verification_service import VerificationService, VertexVerifiers +from hathor.util import Random, not_none +from hathor.verification.verification_service import VerificationService +from hathor.verification.vertex_verifiers import VertexVerifiers from hathor.wallet import BaseWallet, HDWallet, Wallet logger = get_logger() @@ -58,11 +62,14 @@ def check_or_raise(self, condition: bool, message: str) -> None: def create_manager(self, reactor: Reactor) -> HathorManager: import hathor from hathor.conf.get_settings import get_settings, get_settings_source - from hathor.daa import TestMode, _set_test_mode + from hathor.daa import TestMode from hathor.event.storage import EventMemoryStorage, EventRocksDBStorage, EventStorage from hathor.event.websocket.factory import EventWebsocketFactory from hathor.p2p.netfilter.utils import add_peer_id_blacklist from hathor.p2p.peer_discovery import BootstrapPeerDiscovery, DNSPeerDiscovery + from hathor.p2p.sync_v1.factory import SyncV11Factory + from hathor.p2p.sync_v2.factory import SyncV2Factory + from hathor.p2p.sync_version import SyncVersion from hathor.storage import RocksDBStorage from hathor.transaction.storage import ( TransactionCacheStorage, @@ -93,6 +100,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: python=python, platform=platform.platform(), settings=settings_source, + 
reactor_type=type(reactor).__name__, ) tx_storage: TransactionStorage @@ -150,14 +158,18 @@ def create_manager(self, reactor: Reactor) -> HathorManager: hostname = self.get_hostname() network = settings.NETWORK_NAME - enable_sync_v1 = self._args.x_enable_legacy_sync_v1_0 - enable_sync_v1_1 = not self._args.x_sync_v2_only + enable_sync_v1 = not self._args.x_sync_v2_only enable_sync_v2 = self._args.x_sync_v2_only or self._args.x_sync_bridge pubsub = PubSubManager(reactor) if self._args.x_enable_event_queue: - self.event_ws_factory = EventWebsocketFactory(reactor, event_storage) + self.event_ws_factory = EventWebsocketFactory( + peer_id=not_none(peer_id.id), + network=network, + reactor=reactor, + event_storage=event_storage + ) event_manager = EventManager( event_storage=event_storage, @@ -202,9 +214,23 @@ def create_manager(self, reactor: Reactor) -> HathorManager: not_support_features=self._args.signal_not_support ) - vertex_verifiers = VertexVerifiers.create_defaults(settings=settings) + test_mode = TestMode.DISABLED + if self._args.test_mode_tx_weight: + test_mode = TestMode.TEST_TX_WEIGHT + if self.wallet: + self.wallet.test_mode = True + + daa = DifficultyAdjustmentAlgorithm(settings=settings, test_mode=test_mode) + + vertex_verifiers = VertexVerifiers.create_defaults( + settings=settings, + daa=daa, + feature_service=self.feature_service + ) verification_service = VerificationService(verifiers=vertex_verifiers) + cpu_mining_service = CpuMiningService() + p2p_manager = ConnectionsManager( reactor, network=network, @@ -213,10 +239,13 @@ def create_manager(self, reactor: Reactor) -> HathorManager: ssl=True, whitelist_only=False, rng=Random(), - enable_sync_v1=enable_sync_v1, - enable_sync_v1_1=enable_sync_v1_1, - enable_sync_v2=enable_sync_v2, ) + p2p_manager.add_sync_factory(SyncVersion.V1_1, SyncV11Factory(p2p_manager)) + p2p_manager.add_sync_factory(SyncVersion.V2, SyncV2Factory(p2p_manager)) + if enable_sync_v1: + 
p2p_manager.enable_sync_version(SyncVersion.V1_1) + if enable_sync_v2: + p2p_manager.enable_sync_version(SyncVersion.V2) self.manager = HathorManager( reactor, @@ -225,6 +254,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: hostname=hostname, pubsub=pubsub, consensus_algorithm=consensus_algorithm, + daa=daa, peer_id=peer_id, tx_storage=tx_storage, p2p_manager=p2p_manager, @@ -237,12 +267,13 @@ def create_manager(self, reactor: Reactor) -> HathorManager: feature_service=self.feature_service, bit_signaling_service=bit_signaling_service, verification_service=verification_service, + cpu_mining_service=cpu_mining_service ) p2p_manager.set_manager(self.manager) if self._args.stratum: - stratum_factory = StratumFactory(self.manager) + stratum_factory = StratumFactory(self.manager, reactor=reactor) self.manager.stratum_factory = stratum_factory self.manager.metrics.stratum_factory = stratum_factory @@ -268,11 +299,6 @@ def create_manager(self, reactor: Reactor) -> HathorManager: if self._args.bootstrap: p2p_manager.add_peer_discovery(BootstrapPeerDiscovery(self._args.bootstrap)) - if self._args.test_mode_tx_weight: - _set_test_mode(TestMode.TEST_TX_WEIGHT) - if self.wallet: - self.wallet.test_mode = True - if self._args.x_rocksdb_indexes: self.log.warn('--x-rocksdb-indexes is now the default, no need to specify it') if self._args.memory_indexes: diff --git a/hathor/builder/resources_builder.py b/hathor/builder/resources_builder.py index 5fb42ed0a..ce92ccaa1 100644 --- a/hathor/builder/resources_builder.py +++ b/hathor/builder/resources_builder.py @@ -87,6 +87,7 @@ def create_resources(self) -> server.Site: DebugRejectResource, ) from hathor.feature_activation.resources.feature import FeatureResource + from hathor.healthcheck.resources import HealthcheckResource from hathor.mining.ws import MiningWebsocketFactory from hathor.p2p.resources import ( AddPeersResource, @@ -179,6 +180,7 @@ def create_resources(self) -> server.Site: (b'profiler', 
ProfilerResource(self.manager), root), (b'top', CPUProfilerResource(self.manager, cpu), root), (b'mempool', MempoolResource(self.manager), root), + (b'health', HealthcheckResource(self.manager), root), # mining (b'mining', MiningResource(self.manager), root), (b'getmininginfo', MiningInfoResource(self.manager), root), @@ -222,15 +224,15 @@ def create_resources(self) -> server.Site: root.putChild(b'_debug', debug_resource) resources.extend([ (b'log', DebugLogResource(), debug_resource), - (b'raise', DebugRaiseResource(), debug_resource), - (b'reject', DebugRejectResource(), debug_resource), + (b'raise', DebugRaiseResource(self.manager.reactor), debug_resource), + (b'reject', DebugRejectResource(self.manager.reactor), debug_resource), (b'print', DebugPrintResource(), debug_resource), ]) if self._args.enable_crash_api: crash_resource = Resource() root.putChild(b'_crash', crash_resource) resources.extend([ - (b'exit', DebugCrashResource(), crash_resource), + (b'exit', DebugCrashResource(self.manager.reactor), crash_resource), (b'mess_around', DebugMessAroundResource(self.manager), crash_resource), ]) diff --git a/hathor/cli/db_import.py b/hathor/cli/db_import.py index 84a51d185..a43132c40 100644 --- a/hathor/cli/db_import.py +++ b/hathor/cli/db_import.py @@ -18,7 +18,6 @@ from argparse import ArgumentParser, FileType from typing import TYPE_CHECKING, Iterator -from hathor.cli.db_export import MAGIC_HEADER from hathor.cli.run_node import RunNode if TYPE_CHECKING: @@ -46,6 +45,7 @@ def prepare(self, *, register_resources: bool = True) -> None: self.in_file = io.BufferedReader(self._args.import_file) def run(self) -> None: + from hathor.cli.db_export import MAGIC_HEADER from hathor.util import tx_progress header = self.in_file.read(len(MAGIC_HEADER)) diff --git a/hathor/cli/events_simulator/event_forwarding_websocket_factory.py b/hathor/cli/events_simulator/event_forwarding_websocket_factory.py index 15e5e70d1..5b1dad9b3 100644 --- 
a/hathor/cli/events_simulator/event_forwarding_websocket_factory.py +++ b/hathor/cli/events_simulator/event_forwarding_websocket_factory.py @@ -12,21 +12,23 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any +from typing import TYPE_CHECKING, Any from twisted.internet.interfaces import IAddress -from hathor.cli.events_simulator.event_forwarding_websocket_protocol import EventForwardingWebsocketProtocol -from hathor.event.websocket import EventWebsocketFactory -from hathor.simulator import Simulator +from hathor.event.websocket import EventWebsocketFactory # skip-cli-import-custom-check + +if TYPE_CHECKING: + from hathor.cli.events_simulator.event_forwarding_websocket_protocol import EventForwardingWebsocketProtocol + from hathor.simulator import Simulator class EventForwardingWebsocketFactory(EventWebsocketFactory): - def __init__(self, simulator: Simulator, *args: Any, **kwargs: Any) -> None: + def __init__(self, simulator: 'Simulator', *args: Any, **kwargs: Any) -> None: self._simulator = simulator super().__init__(*args, **kwargs) - def buildProtocol(self, _: IAddress) -> EventForwardingWebsocketProtocol: + def buildProtocol(self, _: IAddress) -> 'EventForwardingWebsocketProtocol': protocol = EventForwardingWebsocketProtocol(self._simulator) protocol.factory = self return protocol diff --git a/hathor/cli/events_simulator/event_forwarding_websocket_protocol.py b/hathor/cli/events_simulator/event_forwarding_websocket_protocol.py index da3530572..cd6318a0b 100644 --- a/hathor/cli/events_simulator/event_forwarding_websocket_protocol.py +++ b/hathor/cli/events_simulator/event_forwarding_websocket_protocol.py @@ -16,17 +16,17 @@ from autobahn.websocket import ConnectionRequest -from hathor.event.websocket import EventWebsocketProtocol -from hathor.simulator import Simulator +from hathor.event.websocket import EventWebsocketProtocol # skip-cli-import-custom-check if TYPE_CHECKING: from 
hathor.cli.events_simulator.event_forwarding_websocket_factory import EventForwardingWebsocketFactory + from hathor.simulator import Simulator class EventForwardingWebsocketProtocol(EventWebsocketProtocol): factory: 'EventForwardingWebsocketFactory' - def __init__(self, simulator: Simulator) -> None: + def __init__(self, simulator: 'Simulator') -> None: self._simulator = simulator super().__init__() diff --git a/hathor/cli/events_simulator/events_simulator.py b/hathor/cli/events_simulator/events_simulator.py index a915585de..600232429 100644 --- a/hathor/cli/events_simulator/events_simulator.py +++ b/hathor/cli/events_simulator/events_simulator.py @@ -44,8 +44,8 @@ def execute(args: Namespace) -> None: os.environ['HATHOR_CONFIG_YAML'] = UNITTESTS_SETTINGS_FILEPATH from hathor.cli.events_simulator.event_forwarding_websocket_factory import EventForwardingWebsocketFactory from hathor.cli.events_simulator.scenario import Scenario + from hathor.reactor import get_global_reactor from hathor.simulator import Simulator - from hathor.util import reactor try: scenario = Scenario[args.scenario] @@ -53,6 +53,7 @@ def execute(args: Namespace) -> None: possible_scenarios = [scenario.name for scenario in Scenario] raise ValueError(f'Invalid scenario "{args.scenario}". 
Choose one of {possible_scenarios}') from e + reactor = get_global_reactor() log = logger.new() simulator = Simulator(args.seed) simulator.start() @@ -66,6 +67,8 @@ def execute(args: Namespace) -> None: forwarding_ws_factory = EventForwardingWebsocketFactory( simulator=simulator, + peer_id='simulator_peer_id', + network='simulator_network', reactor=reactor, event_storage=event_ws_factory._event_storage ) @@ -80,7 +83,7 @@ def execute(args: Namespace) -> None: log.info('Started simulating events', scenario=args.scenario, seed=simulator.seed) - forwarding_ws_factory.start(stream_id='simulator') + forwarding_ws_factory.start(stream_id='simulator_stream_id') scenario.simulate(simulator, manager) reactor.listenTCP(args.port, site) reactor.run() diff --git a/hathor/cli/events_simulator/scenario.py b/hathor/cli/events_simulator/scenario.py index ea8f16528..5d029e309 100644 --- a/hathor/cli/events_simulator/scenario.py +++ b/hathor/cli/events_simulator/scenario.py @@ -25,6 +25,7 @@ class Scenario(Enum): SINGLE_CHAIN_ONE_BLOCK = 'SINGLE_CHAIN_ONE_BLOCK' SINGLE_CHAIN_BLOCKS_AND_TRANSACTIONS = 'SINGLE_CHAIN_BLOCKS_AND_TRANSACTIONS' REORG = 'REORG' + UNVOIDED_TRANSACTION = 'UNVOIDED_TRANSACTION' def simulate(self, simulator: 'Simulator', manager: 'HathorManager') -> None: simulate_fns = { @@ -32,6 +33,7 @@ def simulate(self, simulator: 'Simulator', manager: 'HathorManager') -> None: Scenario.SINGLE_CHAIN_ONE_BLOCK: simulate_single_chain_one_block, Scenario.SINGLE_CHAIN_BLOCKS_AND_TRANSACTIONS: simulate_single_chain_blocks_and_transactions, Scenario.REORG: simulate_reorg, + Scenario.UNVOIDED_TRANSACTION: simulate_unvoided_transaction, } simulate_fn = simulate_fns[self] @@ -44,15 +46,14 @@ def simulate_only_load(simulator: 'Simulator', _manager: 'HathorManager') -> Non def simulate_single_chain_one_block(simulator: 'Simulator', manager: 'HathorManager') -> None: - from tests.utils import add_new_blocks + from hathor.simulator.utils import add_new_blocks add_new_blocks(manager, 
1) simulator.run(60) def simulate_single_chain_blocks_and_transactions(simulator: 'Simulator', manager: 'HathorManager') -> None: - from hathor import daa from hathor.conf.get_settings import get_settings - from tests.utils import add_new_blocks, gen_new_tx + from hathor.simulator.utils import add_new_blocks, gen_new_tx settings = get_settings() assert manager.wallet is not None @@ -62,13 +63,13 @@ def simulate_single_chain_blocks_and_transactions(simulator: 'Simulator', manage simulator.run(60) tx = gen_new_tx(manager, address, 1000) - tx.weight = daa.minimum_tx_weight(tx) + tx.weight = manager.daa.minimum_tx_weight(tx) tx.update_hash() assert manager.propagate_tx(tx, fails_silently=False) simulator.run(60) tx = gen_new_tx(manager, address, 2000) - tx.weight = daa.minimum_tx_weight(tx) + tx.weight = manager.daa.minimum_tx_weight(tx) tx.update_hash() assert manager.propagate_tx(tx, fails_silently=False) simulator.run(60) @@ -79,7 +80,7 @@ def simulate_single_chain_blocks_and_transactions(simulator: 'Simulator', manage def simulate_reorg(simulator: 'Simulator', manager: 'HathorManager') -> None: from hathor.simulator import FakeConnection - from tests.utils import add_new_blocks + from hathor.simulator.utils import add_new_blocks builder = simulator.get_default_builder() manager2 = simulator.create_peer(builder) @@ -93,3 +94,49 @@ def simulate_reorg(simulator: 'Simulator', manager: 'HathorManager') -> None: connection = FakeConnection(manager, manager2) simulator.add_connection(connection) simulator.run(60) + + +def simulate_unvoided_transaction(simulator: 'Simulator', manager: 'HathorManager') -> None: + from hathor.conf.get_settings import get_settings + from hathor.simulator.utils import add_new_block, add_new_blocks, gen_new_tx + from hathor.util import not_none + + settings = get_settings() + assert manager.wallet is not None + address = manager.wallet.get_unused_address(mark_as_used=False) + + add_new_blocks(manager, settings.REWARD_SPEND_MIN_BLOCKS + 1) + 
simulator.run(60) + + # A tx is created with weight 19.0005 + tx = gen_new_tx(manager, address, 1000) + tx.weight = 19.0005 + tx.update_hash() + assert manager.propagate_tx(tx, fails_silently=False) + simulator.run(60) + + # A clone is created with a greater timestamp and a lower weight. It's a voided twin tx. + tx2 = tx.clone(include_metadata=False) + tx2.timestamp += 60 + tx2.weight = 19 + tx2.update_hash() + assert manager.propagate_tx(tx2, fails_silently=False) + simulator.run(60) + + # Only the second tx is voided + assert not tx.get_metadata().voided_by + assert tx2.get_metadata().voided_by + + # We add a block confirming the second tx, increasing its acc weight + block = add_new_block(manager, propagate=False) + block.parents = [ + block.parents[0], + settings.GENESIS_TX1_HASH, + not_none(tx2.hash), + ] + assert manager.propagate_tx(block, fails_silently=False) + simulator.run(60) + + # The first tx gets voided and the second gets unvoided + assert tx.get_metadata().voided_by + assert not tx2.get_metadata().voided_by diff --git a/hathor/cli/mining.py b/hathor/cli/mining.py index 9a373be90..1fbd8a927 100644 --- a/hathor/cli/mining.py +++ b/hathor/cli/mining.py @@ -33,9 +33,10 @@ def signal_handler(sig, frame): def worker(q_in, q_out): + from hathor.mining.cpu_mining_service import CpuMiningService signal.signal(signal.SIGINT, signal_handler) block, start, end, sleep_seconds = q_in.get() - block.start_mining(start, end, sleep_seconds=sleep_seconds) + CpuMiningService().start_mining(block, start=start, end=end, sleep_seconds=sleep_seconds) q_out.put(block) @@ -134,7 +135,14 @@ def execute(args: Namespace) -> None: block.nonce, block.weight)) try: - block.verify_without_storage() + from hathor.conf.get_settings import get_settings + from hathor.daa import DifficultyAdjustmentAlgorithm + from hathor.verification.verification_service import VerificationService, VertexVerifiers + settings = get_settings() + daa = DifficultyAdjustmentAlgorithm(settings=settings) + 
verifiers = VertexVerifiers.create_defaults(settings=settings, daa=daa) + verification_service = VerificationService(verifiers=verifiers) + verification_service.verify_without_storage(block) except HathorError: print('[{}] ERROR: Block has not been pushed because it is not valid.'.format(datetime.datetime.now())) else: diff --git a/hathor/cli/multisig_spend.py b/hathor/cli/multisig_spend.py index 24281f233..6b7fcdc57 100644 --- a/hathor/cli/multisig_spend.py +++ b/hathor/cli/multisig_spend.py @@ -27,6 +27,7 @@ def create_parser() -> ArgumentParser: def execute(args: Namespace) -> None: + from hathor.mining.cpu_mining_service import CpuMiningService from hathor.transaction import Transaction from hathor.transaction.scripts import MultiSig @@ -36,7 +37,7 @@ def execute(args: Namespace) -> None: input_data = MultiSig.create_input_data(bytes.fromhex(args.redeem_script), signatures) tx.inputs[0].data = input_data - tx.resolve() + CpuMiningService().resolve(tx) print('Transaction after POW: ', tx.get_struct().hex()) diff --git a/hathor/cli/nginx_config.py b/hathor/cli/nginx_config.py index 18a6f4afe..974d0f74c 100644 --- a/hathor/cli/nginx_config.py +++ b/hathor/cli/nginx_config.py @@ -17,8 +17,6 @@ from enum import Enum from typing import Any, NamedTuple, Optional, TextIO -from hathor.cli.openapi_json import get_openapi_dict - BASE_PATH = os.path.join(os.path.dirname(__file__), 'nginx_files') @@ -26,6 +24,7 @@ def get_openapi(src_file: Optional[TextIO] = None) -> dict[str, Any]: """ Open and parse the json file or generate OpenAPI dict on-the-fly """ if src_file is None: + from hathor.cli.openapi_json import get_openapi_dict return get_openapi_dict() else: return json.load(src_file) diff --git a/hathor/cli/openapi_files/openapi_base.json b/hathor/cli/openapi_files/openapi_base.json index 8c381595b..4ebc82659 100644 --- a/hathor/cli/openapi_files/openapi_base.json +++ b/hathor/cli/openapi_files/openapi_base.json @@ -7,7 +7,7 @@ ], "info": { "title": "Hathor API", - 
"version": "0.57.0" + "version": "0.58.0" }, "consumes": [ "application/json" diff --git a/hathor/cli/openapi_files/register.py b/hathor/cli/openapi_files/register.py index 733f56848..77dc29b87 100644 --- a/hathor/cli/openapi_files/register.py +++ b/hathor/cli/openapi_files/register.py @@ -14,7 +14,7 @@ from typing import TypeVar -from hathor.api_util import Resource +from hathor.api_util import Resource # skip-cli-import-custom-check _registered_resources: list[type[Resource]] = [] @@ -36,6 +36,7 @@ def get_registered_resources() -> list[type[Resource]]: """ import hathor.event.resources.event # noqa: 401 import hathor.feature_activation.resources.feature # noqa: 401 + import hathor.healthcheck.resources.healthcheck # noqa: 401 import hathor.p2p.resources # noqa: 401 import hathor.profiler.resources # noqa: 401 import hathor.stratum.resources # noqa: 401 diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 00ab40956..55b9c1730 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -15,27 +15,26 @@ import os import sys from argparse import SUPPRESS, ArgumentParser, Namespace -from typing import Any, Callable, Optional +from typing import TYPE_CHECKING, Any, Callable, Optional from pydantic import ValidationError from structlog import get_logger -from hathor.cli.run_node_args import RunNodeArgs -from hathor.conf import TESTNET_SETTINGS_FILEPATH, HathorSettings -from hathor.exception import PreInitializationError -from hathor.feature_activation.feature import Feature - logger = get_logger() # LOGGING_CAPTURE_STDOUT = True +if TYPE_CHECKING: + from hathor.cli.run_node_args import RunNodeArgs + class RunNode: - UNSAFE_ARGUMENTS: list[tuple[str, Callable[[RunNodeArgs], bool]]] = [ + UNSAFE_ARGUMENTS: list[tuple[str, Callable[['RunNodeArgs'], bool]]] = [ ('--test-mode-tx-weight', lambda args: bool(args.test_mode_tx_weight)), ('--enable-crash-api', lambda args: bool(args.enable_crash_api)), ('--x-sync-bridge', lambda args: 
bool(args.x_sync_bridge)), ('--x-sync-v2-only', lambda args: bool(args.x_sync_v2_only)), - ('--x-enable-event-queue', lambda args: bool(args.x_enable_event_queue)) + ('--x-enable-event-queue', lambda args: bool(args.x_enable_event_queue)), + ('--x-asyncio-reactor', lambda args: bool(args.x_asyncio_reactor)) ] @classmethod @@ -45,6 +44,7 @@ def create_parser(cls) -> ArgumentParser: Arguments must also be added to hathor.cli.run_node_args.RunNodeArgs """ from hathor.cli.util import create_parser + from hathor.feature_activation.feature import Feature parser = create_parser() parser.add_argument('--hostname', help='Hostname used to be accessed by other peers') @@ -102,8 +102,6 @@ def create_parser(cls) -> ArgumentParser: parser.add_argument('--sentry-dsn', help='Sentry DSN') parser.add_argument('--enable-debug-api', action='store_true', help='Enable _debug/* endpoints') parser.add_argument('--enable-crash-api', action='store_true', help='Enable _crash/* endpoints') - parser.add_argument('--x-enable-legacy-sync-v1_0', action='store_true', help='Enable sync-v1.0, will not ' - 'disable sync-v1.1') v2args = parser.add_mutually_exclusive_group() v2args.add_argument('--x-sync-bridge', action='store_true', help='Enable support for running both sync protocols. DO NOT ENABLE, IT WILL BREAK.') @@ -120,6 +118,8 @@ def create_parser(cls) -> ArgumentParser: help=f'Signal support for a feature. One of {possible_features}') parser.add_argument('--signal-not-support', default=[], action='append', choices=possible_features, help=f'Signal not support for a feature. 
One of {possible_features}') + parser.add_argument('--x-asyncio-reactor', action='store_true', + help='Use asyncio reactor instead of Twisted\'s default.') return parser def prepare(self, *, register_resources: bool = True) -> None: @@ -141,7 +141,8 @@ def prepare(self, *, register_resources: bool = True) -> None: self.check_unsafe_arguments() self.check_python_version() - from hathor.util import reactor + from hathor.reactor import initialize_global_reactor + reactor = initialize_global_reactor(use_asyncio_reactor=self._args.x_asyncio_reactor) self.reactor = reactor from hathor.builder import CliBuilder, ResourcesBuilder @@ -191,6 +192,7 @@ def prepare(self, *, register_resources: bool = True) -> None: wallet=self.manager.wallet, rocksdb_storage=getattr(builder, 'rocksdb_storage', None), stratum_factory=self.manager.stratum_factory, + feature_service=self.manager._feature_service ) def start_sentry_if_possible(self) -> None: @@ -347,6 +349,9 @@ def check_python_version(self) -> None: ])) def __init__(self, *, argv=None): + from hathor.cli.run_node_args import RunNodeArgs + from hathor.conf import TESTNET_SETTINGS_FILEPATH + from hathor.conf.get_settings import get_settings self.log = logger.new() if argv is None: @@ -364,8 +369,9 @@ def __init__(self, *, argv=None): os.environ['HATHOR_CONFIG_YAML'] = TESTNET_SETTINGS_FILEPATH try: - HathorSettings() + get_settings() except (TypeError, ValidationError) as e: + from hathor.exception import PreInitializationError raise PreInitializationError( 'An error was found while trying to initialize HathorSettings. See above for details.' 
) from e diff --git a/hathor/cli/run_node_args.py b/hathor/cli/run_node_args.py index eb9ddcd0c..897555fbb 100644 --- a/hathor/cli/run_node_args.py +++ b/hathor/cli/run_node_args.py @@ -16,8 +16,8 @@ from pydantic import Extra -from hathor.feature_activation.feature import Feature -from hathor.utils.pydantic import BaseModel +from hathor.feature_activation.feature import Feature # skip-cli-import-custom-check +from hathor.utils.pydantic import BaseModel # skip-cli-import-custom-check class RunNodeArgs(BaseModel, extra=Extra.allow): @@ -63,7 +63,6 @@ class RunNodeArgs(BaseModel, extra=Extra.allow): sentry_dsn: Optional[str] enable_debug_api: bool enable_crash_api: bool - x_enable_legacy_sync_v1_0: bool x_sync_bridge: bool x_sync_v2_only: bool x_localhost_only: bool @@ -73,3 +72,4 @@ class RunNodeArgs(BaseModel, extra=Extra.allow): config_yaml: Optional[str] signal_support: set[Feature] signal_not_support: set[Feature] + x_asyncio_reactor: bool diff --git a/hathor/cli/stratum_mining.py b/hathor/cli/stratum_mining.py index 06bd53203..799a210dc 100644 --- a/hathor/cli/stratum_mining.py +++ b/hathor/cli/stratum_mining.py @@ -30,8 +30,8 @@ def create_parser() -> ArgumentParser: def execute(args: Namespace) -> None: from hathor.crypto.util import decode_address + from hathor.reactor import get_global_reactor from hathor.stratum import StratumClient - from hathor.util import reactor from hathor.wallet.exceptions import InvalidAddress address = None @@ -43,7 +43,8 @@ def execute(args: Namespace) -> None: print('The given address is invalid') sys.exit(-1) - miner = StratumClient(proc_count=args.nproc, address=address) + reactor = get_global_reactor() + miner = StratumClient(proc_count=args.nproc, address=address, reactor=reactor) miner.start() point = TCP4ClientEndpoint(reactor, args.host, args.port) connectProtocol(point, miner) diff --git a/hathor/cli/twin_tx.py b/hathor/cli/twin_tx.py index a57a2c8f6..f57c4ee97 100644 --- a/hathor/cli/twin_tx.py +++ 
b/hathor/cli/twin_tx.py @@ -34,6 +34,7 @@ def create_parser() -> ArgumentParser: def execute(args: Namespace) -> None: + from hathor.mining.cpu_mining_service import CpuMiningService from hathor.transaction import Transaction # Get tx you want to create a twin @@ -89,7 +90,7 @@ def execute(args: Namespace) -> None: if args.weight: twin.weight = args.weight - twin.resolve() + CpuMiningService().resolve(twin) if args.human: print(twin.to_json()) else: diff --git a/hathor/cli/util.py b/hathor/cli/util.py index 5a5244422..38555a294 100644 --- a/hathor/cli/util.py +++ b/hathor/cli/util.py @@ -157,6 +157,9 @@ def setup_logging( else: handlers = ['pretty'] + # Flag to enable debug level for both sync-v1 and sync-v2. + debug_sync = False and debug + # See: https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema logging.config.dictConfig({ 'version': 1, @@ -207,6 +210,14 @@ def setup_logging( 'handlers': handlers, 'level': 'DEBUG' if debug else 'INFO', }, + 'hathor.p2p.sync_v1': { + 'handlers': handlers, + 'level': 'DEBUG' if debug_sync else 'INFO', + }, + 'hathor.p2p.sync_v2': { + 'handlers': handlers, + 'level': 'DEBUG' if debug_sync else 'INFO', + } } }) diff --git a/hathor/conf/testnet.py b/hathor/conf/testnet.py index 5f36d8b6b..792d95ce5 100644 --- a/hathor/conf/testnet.py +++ b/hathor/conf/testnet.py @@ -58,31 +58,31 @@ enable_usage=True, default_threshold=30240, features={ - Feature.NOP_FEATURE_1: Criteria( + Feature.NOP_FEATURE_4: Criteria( bit=0, - start_height=3_144_960, # N (right now the best block is 3093551 on testnet) - timeout_height=3_225_600, # N + 2 * 40320 (4 weeks after the start) - minimum_activation_height=3_265_920, # N + 3 * 40320 (6 weeks after the start) + start_height=3_386_880, # N (right now the best block is 3_346_600 on testnet) + timeout_height=3_467_520, # N + 2 * 40320 (4 weeks after the start) + minimum_activation_height=3_507_840, # N + 3 * 40320 (6 weeks after the start) lock_in_on_timeout=False, - 
version='0.56.0', + version='0.57.0', signal_support_by_default=True ), - Feature.NOP_FEATURE_2: Criteria( + Feature.NOP_FEATURE_5: Criteria( bit=1, - start_height=3_144_960, # N (right now the best block is 3093551 on testnet) - timeout_height=3_225_600, # N + 2 * 40320 (4 weeks after the start) + start_height=3_386_880, # N (right now the best block is 3_346_600 on testnet) + timeout_height=3_467_520, # N + 2 * 40320 (4 weeks after the start) minimum_activation_height=0, lock_in_on_timeout=True, - version='0.56.0', + version='0.57.0', signal_support_by_default=False ), - Feature.NOP_FEATURE_3: Criteria( + Feature.NOP_FEATURE_6: Criteria( bit=2, - start_height=3_144_960, # N (right now the best block is 3093551 on testnet) - timeout_height=3_225_600, # N + 2 * 40320 (4 weeks after the start) + start_height=3_386_880, # N (right now the best block is 3_346_600 on testnet) + timeout_height=3_467_520, # N + 2 * 40320 (4 weeks after the start) minimum_activation_height=0, lock_in_on_timeout=False, - version='0.56.0', + version='0.57.0', signal_support_by_default=False ) } diff --git a/hathor/conf/testnet.yml b/hathor/conf/testnet.yml index e2e1da6d3..ca799c299 100644 --- a/hathor/conf/testnet.yml +++ b/hathor/conf/testnet.yml @@ -40,29 +40,31 @@ FEATURE_ACTIVATION: enable_usage: true default_threshold: 30_240 # 30240 = 75% of evaluation_interval (40320) features: - NOP_FEATURE_1: + #### Second Phased Testing features #### + + NOP_FEATURE_4: bit: 0 - start_height: 3_144_960 # N (right now the best block is 3093551 on testnet) - timeout_height: 3_225_600 # N + 2 * 40320 (4 weeks after the start) - minimum_activation_height: 3_265_920 # N + 3 * 40320 (6 weeks after the start) + start_height: 3_386_880 # N (right now the best block is 3_346_600 on testnet) + timeout_height: 3_467_520 # N + 2 * 40320 (4 weeks after the start) + minimum_activation_height: 3_507_840 # N + 3 * 40320 (6 weeks after the start) lock_in_on_timeout: false - version: 0.56.0 + version: 0.57.0 
signal_support_by_default: true - NOP_FEATURE_2: + NOP_FEATURE_5: bit: 1 - start_height: 3_144_960 # N (right now the best block is 3093551 on testnet) - timeout_height: 3_225_600 # N + 2 * 40320 (4 weeks after the start) + start_height: 3_386_880 # N (right now the best block is 3_346_600 on testnet) + timeout_height: 3_467_520 # N + 2 * 40320 (4 weeks after the start) minimum_activation_height: 0 lock_in_on_timeout: true - version: 0.56.0 + version: 0.57.0 signal_support_by_default: false - NOP_FEATURE_3: + NOP_FEATURE_6: bit: 2 - start_height: 3_144_960 # N (right now the best block is 3093551 on testnet) - timeout_height: 3_225_600 # N + 2 * 40320 (4 weeks after the start) + start_height: 3_386_880 # N (right now the best block is 3_346_600 on testnet) + timeout_height: 3_467_520 # N + 2 * 40320 (4 weeks after the start) minimum_activation_height: 0 lock_in_on_timeout: false - version: 0.56.0 + version: 0.57.0 signal_support_by_default: false diff --git a/hathor/consensus/block_consensus.py b/hathor/consensus/block_consensus.py index 18a6da20d..9c8c0d83a 100644 --- a/hathor/consensus/block_consensus.py +++ b/hathor/consensus/block_consensus.py @@ -216,7 +216,9 @@ def update_voided_info(self, block: Block) -> None: if common_block not in heads: self.context.mark_as_reorg(common_block) else: - storage.update_best_block_tips_cache([not_none(blk.hash) for blk in heads]) + best_block_tips = [not_none(blk.hash) for blk in heads] + best_block_tips.append(not_none(block.hash)) + storage.update_best_block_tips_cache(best_block_tips) if not meta.voided_by: self.context.mark_as_reorg(common_block) diff --git a/hathor/consensus/consensus.py b/hathor/consensus/consensus.py index 307dbe0ff..e0a1ad5b5 100644 --- a/hathor/consensus/consensus.py +++ b/hathor/consensus/consensus.py @@ -136,8 +136,7 @@ def _unsafe_update(self, base: BaseTransaction) -> None: reorg_size=reorg_size) # finally signal an index update for all affected transactions - sorted_txs_affected = 
sorted(context.txs_affected, key=lambda tx: not_none(tx.timestamp), reverse=True) - for tx_affected in sorted_txs_affected: + for tx_affected in _sorted_affected_txs(context.txs_affected): assert tx_affected.storage is not None assert tx_affected.storage.indexes is not None tx_affected.storage.indexes.update(tx_affected) @@ -167,3 +166,17 @@ def filter_out_soft_voided_entries(self, tx: BaseTransaction, voided_by: set[byt if not (self.soft_voided_tx_ids & tx3_voided_by): ret.add(h) return ret + + +def _sorted_affected_txs(affected_txs: set[BaseTransaction]) -> list[BaseTransaction]: + """ + Sort affected txs by voided first, then descending timestamp (reverse topological order). + This is useful for generating Reliable Integration events. + """ + def sorter(tx: BaseTransaction) -> tuple[bool, int]: + meta = tx.get_metadata() + is_voided = bool(meta.voided_by) + + return is_voided, not_none(tx.timestamp) + + return sorted(affected_txs, key=sorter, reverse=True) diff --git a/hathor/daa.py b/hathor/daa.py index b812d1a39..4d8fc7413 100644 --- a/hathor/daa.py +++ b/hathor/daa.py @@ -21,11 +21,11 @@ from enum import IntFlag from math import log -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, ClassVar, Optional from structlog import get_logger -from hathor.conf import HathorSettings +from hathor.conf.settings import HathorSettings from hathor.profiler import get_cpu_profiler from hathor.util import iwindows @@ -33,12 +33,8 @@ from hathor.transaction import Block, Transaction logger = get_logger() -settings = HathorSettings() cpu = get_cpu_profiler() -MIN_BLOCK_WEIGHT = settings.MIN_BLOCK_WEIGHT -AVG_TIME_BETWEEN_BLOCKS = settings.AVG_TIME_BETWEEN_BLOCKS - class TestMode(IntFlag): __test__ = False @@ -49,182 +45,179 @@ class TestMode(IntFlag): TEST_ALL_WEIGHT = 3 -TEST_MODE = TestMode.DISABLED - - -def _set_test_mode(mode: TestMode) -> None: - global TEST_MODE - logger.debug('change DAA test mode', from_mode=TEST_MODE.name, to_mode=mode.name) - 
TEST_MODE = mode - - -@cpu.profiler(key=lambda block: 'calculate_block_difficulty!{}'.format(block.hash.hex())) -def calculate_block_difficulty(block: 'Block') -> float: - """ Calculate block weight according to the ascendents of `block`, using calculate_next_weight.""" - if TEST_MODE & TestMode.TEST_BLOCK_WEIGHT: - return 1.0 - - if block.is_genesis: - return MIN_BLOCK_WEIGHT - - return calculate_next_weight(block.get_block_parent(), block.timestamp) - - -def calculate_next_weight(parent_block: 'Block', timestamp: int) -> float: - """ Calculate the next block weight, aka DAA/difficulty adjustment algorithm. - - The algorithm used is described in [RFC 22](https://gitlab.com/HathorNetwork/rfcs/merge_requests/22). - - The weight must not be less than `MIN_BLOCK_WEIGHT`. - """ - if TEST_MODE & TestMode.TEST_BLOCK_WEIGHT: - return 1.0 - - from hathor.transaction import sum_weights - - root = parent_block - N = min(2 * settings.BLOCK_DIFFICULTY_N_BLOCKS, parent_block.get_height() - 1) - K = N // 2 - T = AVG_TIME_BETWEEN_BLOCKS - S = 5 - if N < 10: - return MIN_BLOCK_WEIGHT - - blocks: list['Block'] = [] - while len(blocks) < N + 1: - blocks.append(root) - root = root.get_block_parent() - assert root is not None - - # TODO: revise if this assertion can be safely removed - assert blocks == sorted(blocks, key=lambda tx: -tx.timestamp) - blocks = list(reversed(blocks)) - - assert len(blocks) == N + 1 - solvetimes, weights = zip(*( - (block.timestamp - prev_block.timestamp, block.weight) - for prev_block, block in iwindows(blocks, 2) - )) - assert len(solvetimes) == len(weights) == N, f'got {len(solvetimes)}, {len(weights)} expected {N}' - - sum_solvetimes = 0.0 - logsum_weights = 0.0 - - prefix_sum_solvetimes = [0] - for st in solvetimes: - prefix_sum_solvetimes.append(prefix_sum_solvetimes[-1] + st) - - # Loop through N most recent blocks. N is most recently solved block. 
- for i in range(K, N): - solvetime = solvetimes[i] - weight = weights[i] - x = (prefix_sum_solvetimes[i + 1] - prefix_sum_solvetimes[i - K]) / K - ki = K * (x - T)**2 / (2 * T * T) - ki = max(1, ki / S) - sum_solvetimes += ki * solvetime - logsum_weights = sum_weights(logsum_weights, log(ki, 2) + weight) - - weight = logsum_weights - log(sum_solvetimes, 2) + log(T, 2) - - # Apply weight decay - weight -= get_weight_decay_amount(timestamp - parent_block.timestamp) - - # Apply minimum weight - if weight < MIN_BLOCK_WEIGHT: - weight = MIN_BLOCK_WEIGHT - - return weight - - -def get_weight_decay_amount(distance: int) -> float: - """Return the amount to be reduced in the weight of the block.""" - if not settings.WEIGHT_DECAY_ENABLED: - return 0.0 - if distance < settings.WEIGHT_DECAY_ACTIVATE_DISTANCE: - return 0.0 - - dt = distance - settings.WEIGHT_DECAY_ACTIVATE_DISTANCE - - # Calculate the number of windows. - n_windows = 1 + (dt // settings.WEIGHT_DECAY_WINDOW_SIZE) - return n_windows * settings.WEIGHT_DECAY_AMOUNT - - -def minimum_tx_weight(tx: 'Transaction') -> float: - """ Returns the minimum weight for the param tx - The minimum is calculated by the following function: - - w = alpha * log(size, 2) + 4.0 + 4.0 - ---------------- - 1 + k / amount - - :param tx: tx to calculate the minimum weight - :type tx: :py:class:`hathor.transaction.transaction.Transaction` - - :return: minimum weight for the tx - :rtype: float - """ - # In test mode we don't validate the minimum weight for tx - # We do this to allow generating many txs for testing - if TEST_MODE & TestMode.TEST_TX_WEIGHT: - return 1.0 - - if tx.is_genesis: - return settings.MIN_TX_WEIGHT - - tx_size = len(tx.get_struct()) - - # We need to take into consideration the decimal places because it is inside the amount. - # For instance, if one wants to transfer 20 HTRs, the amount will be 2000. 
- # Max below is preventing division by 0 when handling authority methods that have no outputs - amount = max(1, tx.sum_outputs) / (10 ** settings.DECIMAL_PLACES) - weight = ( - + settings.MIN_TX_WEIGHT_COEFFICIENT * log(tx_size, 2) - + 4 / (1 + settings.MIN_TX_WEIGHT_K / amount) + 4 - ) - - # Make sure the calculated weight is at least the minimum - weight = max(weight, settings.MIN_TX_WEIGHT) - - return weight - - -def get_tokens_issued_per_block(height: int) -> int: - """Return the number of tokens issued (aka reward) per block of a given height.""" - if settings.BLOCKS_PER_HALVING is None: - assert settings.MINIMUM_TOKENS_PER_BLOCK == settings.INITIAL_TOKENS_PER_BLOCK - return settings.MINIMUM_TOKENS_PER_BLOCK - - number_of_halvings = (height - 1) // settings.BLOCKS_PER_HALVING - number_of_halvings = max(0, number_of_halvings) - - if number_of_halvings > settings.MAXIMUM_NUMBER_OF_HALVINGS: - return settings.MINIMUM_TOKENS_PER_BLOCK - - amount = settings.INITIAL_TOKENS_PER_BLOCK // (2**number_of_halvings) - amount = max(amount, settings.MINIMUM_TOKENS_PER_BLOCK) - return amount - +class DifficultyAdjustmentAlgorithm: + # TODO: This singleton is temporary, and only used in PeerId. It should be removed from there, and then from here. 
+ singleton: ClassVar[Optional['DifficultyAdjustmentAlgorithm']] = None -def get_mined_tokens(height: int) -> int: - """Return the number of tokens mined in total at height - """ - assert settings.BLOCKS_PER_HALVING is not None - number_of_halvings = (height - 1) // settings.BLOCKS_PER_HALVING - number_of_halvings = max(0, number_of_halvings) + def __init__(self, *, settings: HathorSettings, test_mode: TestMode = TestMode.DISABLED) -> None: + self._settings = settings + self.AVG_TIME_BETWEEN_BLOCKS = self._settings.AVG_TIME_BETWEEN_BLOCKS + self.MIN_BLOCK_WEIGHT = self._settings.MIN_BLOCK_WEIGHT + self.TEST_MODE = test_mode + DifficultyAdjustmentAlgorithm.singleton = self - blocks_in_this_halving = height - number_of_halvings * settings.BLOCKS_PER_HALVING + @cpu.profiler(key=lambda _, block: 'calculate_block_difficulty!{}'.format(block.hash.hex())) + def calculate_block_difficulty(self, block: 'Block') -> float: + """ Calculate block weight according to the ascendents of `block`, using calculate_next_weight.""" + if self.TEST_MODE & TestMode.TEST_BLOCK_WEIGHT: + return 1.0 + + if block.is_genesis: + return self.MIN_BLOCK_WEIGHT + + return self.calculate_next_weight(block.get_block_parent(), block.timestamp) + + def calculate_next_weight(self, parent_block: 'Block', timestamp: int) -> float: + """ Calculate the next block weight, aka DAA/difficulty adjustment algorithm. - tokens_per_block = settings.INITIAL_TOKENS_PER_BLOCK - mined_tokens = 0 + The algorithm used is described in [RFC 22](https://gitlab.com/HathorNetwork/rfcs/merge_requests/22). + + The weight must not be less than `MIN_BLOCK_WEIGHT`. 
+ """ + if self.TEST_MODE & TestMode.TEST_BLOCK_WEIGHT: + return 1.0 + + from hathor.transaction import sum_weights + + root = parent_block + N = min(2 * self._settings.BLOCK_DIFFICULTY_N_BLOCKS, parent_block.get_height() - 1) + K = N // 2 + T = self.AVG_TIME_BETWEEN_BLOCKS + S = 5 + if N < 10: + return self.MIN_BLOCK_WEIGHT + + blocks: list['Block'] = [] + while len(blocks) < N + 1: + blocks.append(root) + root = root.get_block_parent() + assert root is not None + + # TODO: revise if this assertion can be safely removed + assert blocks == sorted(blocks, key=lambda tx: -tx.timestamp) + blocks = list(reversed(blocks)) + + assert len(blocks) == N + 1 + solvetimes, weights = zip(*( + (block.timestamp - prev_block.timestamp, block.weight) + for prev_block, block in iwindows(blocks, 2) + )) + assert len(solvetimes) == len(weights) == N, f'got {len(solvetimes)}, {len(weights)} expected {N}' + + sum_solvetimes = 0.0 + logsum_weights = 0.0 + + prefix_sum_solvetimes = [0] + for st in solvetimes: + prefix_sum_solvetimes.append(prefix_sum_solvetimes[-1] + st) + + # Loop through N most recent blocks. N is most recently solved block. 
+ for i in range(K, N): + solvetime = solvetimes[i] + weight = weights[i] + x = (prefix_sum_solvetimes[i + 1] - prefix_sum_solvetimes[i - K]) / K + ki = K * (x - T)**2 / (2 * T * T) + ki = max(1, ki / S) + sum_solvetimes += ki * solvetime + logsum_weights = sum_weights(logsum_weights, log(ki, 2) + weight) + + weight = logsum_weights - log(sum_solvetimes, 2) + log(T, 2) + + # Apply weight decay + weight -= self.get_weight_decay_amount(timestamp - parent_block.timestamp) + + # Apply minimum weight + if weight < self.MIN_BLOCK_WEIGHT: + weight = self.MIN_BLOCK_WEIGHT + + return weight + + def get_weight_decay_amount(self, distance: int) -> float: + """Return the amount to be reduced in the weight of the block.""" + if not self._settings.WEIGHT_DECAY_ENABLED: + return 0.0 + if distance < self._settings.WEIGHT_DECAY_ACTIVATE_DISTANCE: + return 0.0 + + dt = distance - self._settings.WEIGHT_DECAY_ACTIVATE_DISTANCE + + # Calculate the number of windows. + n_windows = 1 + (dt // self._settings.WEIGHT_DECAY_WINDOW_SIZE) + return n_windows * self._settings.WEIGHT_DECAY_AMOUNT + + def minimum_tx_weight(self, tx: 'Transaction') -> float: + """ Returns the minimum weight for the param tx + The minimum is calculated by the following function: + + w = alpha * log(size, 2) + 4.0 + 4.0 + ---------------- + 1 + k / amount + + :param tx: tx to calculate the minimum weight + :type tx: :py:class:`hathor.transaction.transaction.Transaction` + + :return: minimum weight for the tx + :rtype: float + """ + # In test mode we don't validate the minimum weight for tx + # We do this to allow generating many txs for testing + if self.TEST_MODE & TestMode.TEST_TX_WEIGHT: + return 1.0 + + if tx.is_genesis: + return self._settings.MIN_TX_WEIGHT + + tx_size = len(tx.get_struct()) + + # We need to take into consideration the decimal places because it is inside the amount. + # For instance, if one wants to transfer 20 HTRs, the amount will be 2000. 
+ # Max below is preventing division by 0 when handling authority methods that have no outputs + amount = max(1, tx.sum_outputs) / (10 ** self._settings.DECIMAL_PLACES) + weight = ( + + self._settings.MIN_TX_WEIGHT_COEFFICIENT * log(tx_size, 2) + + 4 / (1 + self._settings.MIN_TX_WEIGHT_K / amount) + 4 + ) + + # Make sure the calculated weight is at least the minimum + weight = max(weight, self._settings.MIN_TX_WEIGHT) + + return weight + + def get_tokens_issued_per_block(self, height: int) -> int: + """Return the number of tokens issued (aka reward) per block of a given height.""" + if self._settings.BLOCKS_PER_HALVING is None: + assert self._settings.MINIMUM_TOKENS_PER_BLOCK == self._settings.INITIAL_TOKENS_PER_BLOCK + return self._settings.MINIMUM_TOKENS_PER_BLOCK + + number_of_halvings = (height - 1) // self._settings.BLOCKS_PER_HALVING + number_of_halvings = max(0, number_of_halvings) + + if number_of_halvings > self._settings.MAXIMUM_NUMBER_OF_HALVINGS: + return self._settings.MINIMUM_TOKENS_PER_BLOCK + + amount = self._settings.INITIAL_TOKENS_PER_BLOCK // (2**number_of_halvings) + amount = max(amount, self._settings.MINIMUM_TOKENS_PER_BLOCK) + return amount + + def get_mined_tokens(self, height: int) -> int: + """Return the number of tokens mined in total at height + """ + assert self._settings.BLOCKS_PER_HALVING is not None + number_of_halvings = (height - 1) // self._settings.BLOCKS_PER_HALVING + number_of_halvings = max(0, number_of_halvings) + + blocks_in_this_halving = height - number_of_halvings * self._settings.BLOCKS_PER_HALVING + + tokens_per_block = self._settings.INITIAL_TOKENS_PER_BLOCK + mined_tokens = 0 - # Sum the past halvings - for _ in range(number_of_halvings): - mined_tokens += settings.BLOCKS_PER_HALVING * tokens_per_block - tokens_per_block //= 2 - tokens_per_block = max(tokens_per_block, settings.MINIMUM_TOKENS_PER_BLOCK) + # Sum the past halvings + for _ in range(number_of_halvings): + mined_tokens += self._settings.BLOCKS_PER_HALVING 
* tokens_per_block + tokens_per_block //= 2 + tokens_per_block = max(tokens_per_block, self._settings.MINIMUM_TOKENS_PER_BLOCK) - # Sum the blocks in the current halving - mined_tokens += blocks_in_this_halving * tokens_per_block + # Sum the blocks in the current halving + mined_tokens += blocks_in_this_halving * tokens_per_block - return mined_tokens + return mined_tokens diff --git a/hathor/debug_resources.py b/hathor/debug_resources.py index 6e050a63c..5d0707b2f 100644 --- a/hathor/debug_resources.py +++ b/hathor/debug_resources.py @@ -24,7 +24,7 @@ from hathor.cli.openapi_files.register import register_resource from hathor.exception import HathorError from hathor.manager import HathorManager -from hathor.util import reactor +from hathor.reactor import ReactorProtocol from hathor.utils.zope import asserted_cast logger = get_logger() @@ -54,6 +54,10 @@ class DebugRaiseResource(Resource): } default_msg = 'exception raised for debugging purposes' + def __init__(self, reactor: ReactorProtocol) -> None: + super().__init__() + self._reactor = reactor + def run(self, exc_cls: type[BaseException], msg: str) -> None: raise exc_cls(msg) @@ -63,7 +67,7 @@ def render_GET(self, request: Request) -> bytes: assert exc_cls_name in self.exc_class_map exc_cls = self.exc_class_map[exc_cls_name] msg = get_arg_default(raw_args, 'msg', self.default_msg) - threaded_reactor = asserted_cast(IReactorFromThreads, reactor) + threaded_reactor = asserted_cast(IReactorFromThreads, self._reactor) threaded_reactor.callFromThread(self.run, exc_cls, msg) return b'OK: no side-effects\n' @@ -188,7 +192,7 @@ def render_GET(self, request: Request) -> bytes: mess = get_arg_default(get_args(request), 'with', self.default_mess) assert mess in self.mess_map mess_func = self.mess_map[mess] - threaded_reactor = asserted_cast(IReactorFromThreads, reactor) + threaded_reactor = asserted_cast(IReactorFromThreads, self.manager.reactor) threaded_reactor.callFromThread(mess_func) return b'OK: database yanked, 
full-node will break\n' @@ -208,6 +212,10 @@ class DebugCrashResource(Resource): } } + def __init__(self, reactor: ReactorProtocol) -> None: + super().__init__() + self._reactor = reactor + def run(self, code: int) -> None: # XXX: sys.exit will raise a SystemExit exception that get's trapped by twisted # os._exit will bypass that by exiting directly, note that no cleanup methods will be called @@ -215,5 +223,5 @@ def run(self, code: int) -> None: def render_GET(self, request: Request) -> bytes: code = get_arg_default(get_args(request), 'code', -1) - reactor.callLater(1.0, self.run, code) + self._reactor.callLater(1.0, self.run, code) return b'OK: full-node will exit and probably break database\n' diff --git a/hathor/event/event_manager.py b/hathor/event/event_manager.py index 4ac536e6c..6306707c6 100644 --- a/hathor/event/event_manager.py +++ b/hathor/event/event_manager.py @@ -23,8 +23,9 @@ from hathor.event.storage import EventStorage from hathor.event.websocket import EventWebsocketFactory from hathor.pubsub import EventArguments, HathorEvents, PubSubManager +from hathor.reactor import ReactorProtocol as Reactor from hathor.transaction import BaseTransaction -from hathor.util import Reactor, not_none, progress +from hathor.util import not_none, progress from hathor.utils.iter import batch_iterator logger = get_logger() @@ -70,7 +71,7 @@ def __init__( pubsub: PubSubManager, reactor: Reactor, event_ws_factory: Optional[EventWebsocketFactory] = None, - ): + ) -> None: self.log = logger.new() self._reactor = reactor @@ -233,7 +234,6 @@ def _create_event( """Actually creates a BaseEvent.""" return BaseEvent.from_event_arguments( event_id=0 if self._last_event is None else self._last_event.id + 1, - peer_id=self._peer_id, timestamp=self._reactor.seconds(), event_type=event_type, event_args=event_args, diff --git a/hathor/event/model/base_event.py b/hathor/event/model/base_event.py index c64700fba..8f15fca88 100644 --- a/hathor/event/model/base_event.py +++ 
b/hathor/event/model/base_event.py @@ -23,8 +23,6 @@ class BaseEvent(BaseModel, use_enum_values=True): - # Full node id, because different full nodes can have different sequences of events - peer_id: str # Event unique id, determines event order id: NonNegativeInt # Timestamp in which the event was emitted, this follows the unix_timestamp format, it's only informative, events @@ -42,7 +40,6 @@ class BaseEvent(BaseModel, use_enum_values=True): @classmethod def from_event_arguments( cls, - peer_id: str, event_id: NonNegativeInt, timestamp: float, event_type: EventType, @@ -53,7 +50,6 @@ def from_event_arguments( event_data_type = event_type.data_type() return cls( - peer_id=peer_id, id=event_id, timestamp=timestamp, type=event_type, @@ -66,7 +62,7 @@ def data_type_must_match_event_type(cls, v, values): event_type = EventType(values['type']) expected_data_type = event_type.data_type() - if type(v) != expected_data_type: + if type(v) is not expected_data_type: raise ValueError('event data type does not match event type') return v diff --git a/hathor/event/websocket/factory.py b/hathor/event/websocket/factory.py index 9d024bdad..2bc2724e7 100644 --- a/hathor/event/websocket/factory.py +++ b/hathor/event/websocket/factory.py @@ -21,7 +21,8 @@ from hathor.event.storage import EventStorage from hathor.event.websocket.protocol import EventWebsocketProtocol from hathor.event.websocket.response import EventResponse, InvalidRequestType -from hathor.util import Reactor, not_none +from hathor.reactor import ReactorProtocol as Reactor +from hathor.util import not_none logger = get_logger() @@ -40,9 +41,18 @@ class EventWebsocketFactory(WebSocketServerFactory): # The unique stream ID _stream_id: Optional[str] = None - def __init__(self, reactor: Reactor, event_storage: EventStorage): + def __init__( + self, + *, + peer_id: str, + network: str, + reactor: Reactor, + event_storage: EventStorage + ) -> None: super().__init__() self.log = logger.new() + self._peer_id = peer_id + 
self._network = network self._reactor = reactor self._event_storage = event_storage self._connections: set[EventWebsocketProtocol] = set() @@ -113,6 +123,8 @@ def _send_event_to_connection(self, connection: EventWebsocketProtocol, event: B assert self._latest_event_id is not None, '_latest_event_id must be set.' response = EventResponse( + peer_id=self._peer_id, + network=self._network, event=event, latest_event_id=self._latest_event_id, stream_id=not_none(self._stream_id) diff --git a/hathor/event/websocket/response.py b/hathor/event/websocket/response.py index 78bbe4c65..b8f83016b 100644 --- a/hathor/event/websocket/response.py +++ b/hathor/event/websocket/response.py @@ -29,12 +29,16 @@ class EventResponse(Response): Args: type: The type of the response. + peer_id: Full node id, because different full nodes can have different sequences of events. + network: The network for which this event was generated. event: The event. latest_event_id: The ID of the latest event known by the server. stream_id: The ID of the current stream. """ type: str = Field(default='EVENT', const=True) + peer_id: str + network: str event: BaseEvent latest_event_id: NonNegativeInt stream_id: str diff --git a/hathor/exception.py b/hathor/exception.py index f898b3a66..1d3d42547 100644 --- a/hathor/exception.py +++ b/hathor/exception.py @@ -23,6 +23,16 @@ class BuilderError(Exception): pass +class BlockTemplateError(Exception): + """Base class for exceptions generating block template.""" + pass + + +class BlockTemplateTimestampError(BlockTemplateError): + """Raised when there is no timestamp available to prepare a block template.""" + pass + + class InvalidNewTransaction(HathorError): """Raised when a new received tx/block is not valid. 
""" diff --git a/hathor/feature_activation/bit_signaling_service.py b/hathor/feature_activation/bit_signaling_service.py index 88a1d38b4..a8f7f09a4 100644 --- a/hathor/feature_activation/bit_signaling_service.py +++ b/hathor/feature_activation/bit_signaling_service.py @@ -54,6 +54,10 @@ def __init__( self._validate_support_intersection() def start(self) -> None: + """ + Log information related to bit signaling. Must be called after the storage is ready and migrations have + been applied. + """ best_block = self._tx_storage.get_best_block() self._warn_non_signaling_features(best_block) diff --git a/hathor/feature_activation/feature.py b/hathor/feature_activation/feature.py index 9cfd99ec9..c056b21d1 100644 --- a/hathor/feature_activation/feature.py +++ b/hathor/feature_activation/feature.py @@ -23,6 +23,12 @@ class Feature(Enum): should NOT be changed either, as configuration uses them for setting feature activation criteria. """ + # First Phased Testing features NOP_FEATURE_1 = 'NOP_FEATURE_1' NOP_FEATURE_2 = 'NOP_FEATURE_2' NOP_FEATURE_3 = 'NOP_FEATURE_3' + + # Second Phased Testing features + NOP_FEATURE_4 = 'NOP_FEATURE_4' + NOP_FEATURE_5 = 'NOP_FEATURE_5' + NOP_FEATURE_6 = 'NOP_FEATURE_6' diff --git a/hathor/feature_activation/feature_service.py b/hathor/feature_activation/feature_service.py index 9d3d82c28..4d44dd5c2 100644 --- a/hathor/feature_activation/feature_service.py +++ b/hathor/feature_activation/feature_service.py @@ -12,28 +12,75 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from dataclasses import dataclass +from typing import TYPE_CHECKING, TypeAlias + from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.feature_description import FeatureDescription from hathor.feature_activation.model.feature_state import FeatureState from hathor.feature_activation.settings import Settings as FeatureSettings -from hathor.transaction import Block -from hathor.transaction.storage import TransactionStorage + +if TYPE_CHECKING: + from hathor.transaction import Block + from hathor.transaction.storage import TransactionStorage + + +@dataclass(frozen=True, slots=True) +class BlockIsSignaling: + """Represent that a block is correctly signaling support for all currently mandatory features.""" + pass + + +@dataclass(frozen=True, slots=True) +class BlockIsMissingSignal: + """Represent that a block is not signaling support for at least one currently mandatory feature.""" + feature: Feature + + +BlockSignalingState: TypeAlias = BlockIsSignaling | BlockIsMissingSignal class FeatureService: __slots__ = ('_feature_settings', '_tx_storage') - def __init__(self, *, feature_settings: FeatureSettings, tx_storage: TransactionStorage) -> None: + def __init__(self, *, feature_settings: FeatureSettings, tx_storage: 'TransactionStorage') -> None: self._feature_settings = feature_settings self._tx_storage = tx_storage - def is_feature_active(self, *, block: Block, feature: Feature) -> bool: + def is_feature_active(self, *, block: 'Block', feature: Feature) -> bool: """Returns whether a Feature is active at a certain block.""" state = self.get_state(block=block, feature=feature) return state == FeatureState.ACTIVE - def get_state(self, *, block: Block, feature: Feature) -> FeatureState: + def is_signaling_mandatory_features(self, block: 'Block') -> BlockSignalingState: + """ + Return whether a block is signaling features that are mandatory, that is, any feature currently in the + MUST_SIGNAL phase. 
+ """ + bit_counts = block.get_feature_activation_bit_counts() + height = block.get_height() + offset_to_boundary = height % self._feature_settings.evaluation_interval + remaining_blocks = self._feature_settings.evaluation_interval - offset_to_boundary - 1 + descriptions = self.get_bits_description(block=block) + + must_signal_features = ( + feature for feature, description in descriptions.items() + if description.state is FeatureState.MUST_SIGNAL + ) + + for feature in must_signal_features: + criteria = self._feature_settings.features[feature] + threshold = criteria.get_threshold(self._feature_settings) + count = bit_counts[criteria.bit] + missing_signals = threshold - count + + if missing_signals > remaining_blocks: + return BlockIsMissingSignal(feature=feature) + + return BlockIsSignaling() + + def get_state(self, *, block: 'Block', feature: Feature) -> FeatureState: """Returns the state of a feature at a certain block. Uses block metadata to cache states.""" # per definition, the genesis block is in the DEFINED state for all features @@ -54,6 +101,9 @@ def get_state(self, *, block: Block, feature: Feature) -> FeatureState: previous_boundary_block = self._get_ancestor_at_height(block=block, height=previous_boundary_height) previous_boundary_state = self.get_state(block=previous_boundary_block, feature=feature) + # We cache _and save_ the state of the previous boundary block that we just got. + previous_boundary_block.set_feature_state(feature=feature, state=previous_boundary_state, save=True) + if offset_to_boundary != 0: return previous_boundary_state @@ -63,14 +113,16 @@ def get_state(self, *, block: Block, feature: Feature) -> FeatureState: previous_state=previous_boundary_state ) - block.update_feature_state(feature=feature, state=new_state) + # We cache the just calculated state of the current block _without saving it_, as it may still be unverified, + # so we cannot persist its metadata. That's why we cache and save the previous boundary block above. 
+ block.set_feature_state(feature=feature, state=new_state) return new_state def _calculate_new_state( self, *, - boundary_block: Block, + boundary_block: 'Block', feature: Feature, previous_state: FeatureState ) -> FeatureState: @@ -136,7 +188,7 @@ def _calculate_new_state( raise ValueError(f'Unknown previous state: {previous_state}') - def get_bits_description(self, *, block: Block) -> dict[Feature, FeatureDescription]: + def get_bits_description(self, *, block: 'Block') -> dict[Feature, FeatureDescription]: """Returns the criteria definition and feature state for all features at a certain block.""" return { feature: FeatureDescription( @@ -146,7 +198,7 @@ def get_bits_description(self, *, block: Block) -> dict[Feature, FeatureDescript for feature, criteria in self._feature_settings.features.items() } - def _get_ancestor_at_height(self, *, block: Block, height: int) -> Block: + def _get_ancestor_at_height(self, *, block: 'Block', height: int) -> 'Block': """ Given a block, returns its ancestor at a specific height. Uses the height index if the block is in the best blockchain, or search iteratively otherwise. @@ -158,13 +210,14 @@ def _get_ancestor_at_height(self, *, block: Block, height: int) -> Block: metadata = block.get_metadata() if not metadata.voided_by and (ancestor := self._tx_storage.get_transaction_by_height(height)): + from hathor.transaction import Block assert isinstance(ancestor, Block) return ancestor return _get_ancestor_iteratively(block=block, ancestor_height=height) -def _get_ancestor_iteratively(*, block: Block, ancestor_height: int) -> Block: +def _get_ancestor_iteratively(*, block: 'Block', ancestor_height: int) -> 'Block': """Given a block, returns its ancestor at a specific height by iterating over its ancestors. This is slow.""" # TODO: there are further optimizations to be done here, the latest common block height could be persisted in # metadata, so we could still use the height index if the requested height is before that height. 
diff --git a/hathor/healthcheck/resources/__init__.py b/hathor/healthcheck/resources/__init__.py new file mode 100644 index 000000000..514b99ff0 --- /dev/null +++ b/hathor/healthcheck/resources/__init__.py @@ -0,0 +1,19 @@ +# Copyright 2021 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.healthcheck.resources.healthcheck import HealthcheckResource + +__all__ = [ + 'HealthcheckResource', +] diff --git a/hathor/healthcheck/resources/healthcheck.py b/hathor/healthcheck/resources/healthcheck.py new file mode 100644 index 000000000..5e9afcb9f --- /dev/null +++ b/hathor/healthcheck/resources/healthcheck.py @@ -0,0 +1,197 @@ +import asyncio + +from healthcheck import Healthcheck, HealthcheckCallbackResponse, HealthcheckInternalComponent, HealthcheckStatus + +from hathor.api_util import Resource, get_arg_default, get_args +from hathor.cli.openapi_files.register import register_resource +from hathor.manager import HathorManager +from hathor.util import json_dumpb + + +async def sync_healthcheck(manager: HathorManager) -> HealthcheckCallbackResponse: + healthy, reason = manager.is_sync_healthy() + + return HealthcheckCallbackResponse( + status=HealthcheckStatus.PASS if healthy else HealthcheckStatus.FAIL, + output=reason or 'Healthy', + ) + + +@register_resource +class HealthcheckResource(Resource): + isLeaf = True + + def __init__(self, manager: HathorManager): + self.manager = manager + + def render_GET(self, request): + """ GET request /health/ + 
Returns the health status of the fullnode + + The 'strict_status_code' argument can be used to return 200 even if the fullnode is unhealthy. + This can be useful when integrating with tools that could prefer to pass the response code only + in case the response is 200. + + :rtype: string (json) + """ + raw_args = get_args(request) + strict_status_code = get_arg_default(raw_args, 'strict_status_code', '0') == '1' + + sync_component = HealthcheckInternalComponent( + name='sync', + ) + sync_component.add_healthcheck(lambda: sync_healthcheck(self.manager)) + + healthcheck = Healthcheck(name='hathor-core', components=[sync_component]) + status = asyncio.get_event_loop().run_until_complete(healthcheck.run()) + + if strict_status_code: + request.setResponseCode(200) + else: + status_code = status.get_http_status_code() + request.setResponseCode(status_code) + + return json_dumpb(status.to_json()) + + +HealthcheckResource.openapi = { + '/health': { + 'x-visibility': 'public', + 'x-rate-limit': { + 'global': [ + { + 'rate': '10r/s', + 'burst': 10, + 'delay': 5 + } + ], + 'per-ip': [ + { + 'rate': '1r/s', + 'burst': 3, + 'delay': 2 + } + ] + }, + 'get': { + 'tags': ['healthcheck'], + 'operationId': 'get', + 'summary': 'Health status of the fullnode', + 'description': ''' +Returns 200 if the fullnode should be considered healthy. + +Returns 503 otherwise. The response will contain the components that were considered for the healthcheck +and the reason why they were unhealthy. + +Returning 503 with a response body is not the standard behavior for our API, but it was chosen because +most healthcheck tools expect a 503 response code to indicate that the service is unhealthy. + +Optionally, there is a query parameter 'strict_status_code' that can be used to return 200 even if the fullnode +is unhealthy. When its value is 1, the response will always be 200. + +We currently perform 2 checks in the sync mechanism for the healthcheck: +1. 
Whether the fullnode has recent block activity, i.e. if the fullnode has blocks with recent timestamps. +2. Whether the fullnode has at least one synced peer + ''', + 'parameters': [ + { + 'name': 'strict_status_code', + 'in': 'query', + 'description': 'Enables strict status code. If set to 1, the response will always be 200.', + 'required': False, + 'schema': { + 'type': 'string' + } + }, + ], + 'responses': { + '200': { + 'description': 'Healthy', + 'content': { + 'application/json': { + 'examples': { + 'healthy': { + 'summary': 'Healthy node', + 'value': { + 'status': 'pass', + 'description': 'Hathor-core v0.56.0', + 'checks': { + 'sync': [ + { + 'componentName': 'sync', + 'componentType': 'internal', + 'status': 'pass', + 'output': 'Healthy' + } + ] + } + } + } + } + } + } + }, + '503': { + 'description': 'Unhealthy', + 'content': { + 'application/json': { + 'examples': { + 'no_recent_activity': { + 'summary': 'Node with no recent activity', + 'value': { + 'status': 'fail', + 'description': 'Hathor-core v0.56.0', + 'checks': { + 'sync': [ + { + 'componentName': 'sync', + 'componentType': 'internal', + 'status': 'fail', + 'output': 'Node doesn\'t have recent blocks' + } + ] + } + } + }, + 'no_synced_peer': { + 'summary': 'Node with no synced peer', + 'value': { + 'status': 'fail', + 'description': 'Hathor-core v0.56.0', + 'checks': { + 'sync': [ + { + 'componentName': 'sync', + 'componentType': 'internal', + 'status': 'fail', + 'output': 'Node doesn\'t have a synced peer' + } + ] + } + } + }, + 'peer_best_block_far_ahead': { + 'summary': 'Peer with best block too far ahead', + 'value': { + 'status': 'fail', + 'description': 'Hathor-core v0.56.0', + 'checks': { + 'sync': [ + { + 'componentName': 'sync', + 'componentType': 'internal', + 'status': 'fail', + 'output': 'Node\'s peer with highest height is too far ahead.' 
+ } + ] + } + } + } + } + } + } + }, + } + } + } +} diff --git a/hathor/indexes/deps_index.py b/hathor/indexes/deps_index.py deleted file mode 100644 index fc8d56a87..000000000 --- a/hathor/indexes/deps_index.py +++ /dev/null @@ -1,195 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from abc import abstractmethod -from typing import TYPE_CHECKING, Iterator - -from hathor.indexes.base_index import BaseIndex -from hathor.indexes.scope import Scope -from hathor.transaction import BaseTransaction, Block - -if TYPE_CHECKING: # pragma: no cover - from hathor.transaction.storage import TransactionStorage - - -# XXX: this arbitrary height limit must fit in a u32 (4-bytes unsigned), so it can be stored easily on rocksdb -INF_HEIGHT: int = 2**32 - 1 - -SCOPE = Scope( - include_blocks=True, - include_txs=True, - include_voided=True, - include_partial=True -) - - -def get_requested_from_height(tx: BaseTransaction) -> int: - """Return the height of the block that requested (directly or indirectly) the download of this transaction. - - If this value cannot be determined (either for the lack of a metadata or otherwise), the INF_HEIGHT constant is - returned instead. So there will always be a value (it's never None). - - This is used to help prioritize which transaction to start from next on sync-v2 when syncing the transactions - after downloading a chain of blocks. 
- """ - assert tx.storage is not None - if tx.is_block: - assert isinstance(tx, Block) - return tx.get_height() - first_block = tx.get_metadata().first_block - if first_block is None: - # XXX: consensus did not run yet to update first_block, what should we do? - # I'm defaulting the height to `inf` (practically), this should make it heightest priority when - # choosing which transactions to fetch next - return INF_HEIGHT - block = tx.storage.get_transaction(first_block) - assert isinstance(block, Block) - return block.get_height() - - -class DepsIndex(BaseIndex): - """ Index of dependencies between transactions - - This index exists to accelerate queries related to the partial validation mechanism needed by sync-v2. More - specifically these queries: - - - Which transactions need to be downloaded next? That is, all the transactions which are a reverse dependency of - all the transactions that aren't fully validated; - - Which transactions can we validate next? That is, all the transactions which are not fully validated but can be - fully validated because all of its dependencies have been downloaded and are now fully validated; - - These queries would normally need traversals that are at the very **least** O(N) with N being the total number of - transactions in the blockchain. The specific speed up with the index varies but should at **most** O(M) with M - being the total number of transactions in the index. - - Terminology: - - - a tx is ready: said when all of its dependencies are in storage and are fully-valid - - (direct) dependencies of tx: all transactions that tx needs to be validated, which are its parents and its inputs - - reverse dependencies of tx: all transactions that depend on tx for being validated, that is if tx1 depends on tx2 - and tx3, and tx4 depends on tx3 and tx5, the reverse dependencies of tx3 would be tx1 and tx4. 
- - needed transactions: all transactions which need to be downloaded (we also store which tx asked for a transaction - to be downloaded) - - - Examples: - - - Consider the following complete DAG (it doesn't matter if a tx is a block or not): - - +----------------v - A -----> B -----> C ----> D - +--> E --^ ^ - +------------+ - - These are all the dependency relations (direct/directly is implied, as shown on the first examples): - - - A does not have any (direct) dependency - - A is a reverse dependency of B and E - - B (directly) depends on A and E - - B is a reverse dependency of C and D - - C depends on B and E - - C is a reverse dependency of D - - D depends on B and C - - D does not have any reverse dependency - - E depends on A - - E is a reverse dependency of B and C - - These are some alternative ways to express some of those relations: - - - the list of reverse dependencies of A is [B, E] - - the list of (direct) dependencies of B is [A, E] - - - The "needed" and "ready" concepts should be easier to understand, but are harder to ascii-draw, thus I skipped - them. - """ - - def get_scope(self) -> Scope: - return SCOPE - - def init_loop_step(self, tx: BaseTransaction) -> None: - self.add_tx(tx) - - def update(self, tx: BaseTransaction) -> None: - assert tx.hash is not None - tx_meta = tx.get_metadata() - if tx_meta.validation.is_fully_connected(): - self.remove_ready_for_validation(tx.hash) - - @abstractmethod - def add_tx(self, tx: BaseTransaction, partial: bool = True) -> None: - """Update 'deps' and 'needed' sub-indexes, removing them when necessary (i.e. validation is complete). - - Note: this method is idempotent. - """ - raise NotImplementedError - - @abstractmethod - def del_tx(self, tx: BaseTransaction) -> None: - """Update 'deps' and 'needed' sub-indexes, removing them when necessary (i.e. validation is complete). - - Note: this method is idempotent. 
- """ - raise NotImplementedError - - @abstractmethod - def remove_ready_for_validation(self, tx: bytes) -> None: - """Removes from ready for validation set. - """ - raise NotImplementedError - - @abstractmethod - def next_ready_for_validation(self, tx_storage: 'TransactionStorage', *, dry_run: bool = False) -> Iterator[bytes]: - """Yields and removes all txs ready for validation even if they become ready while iterating. - """ - raise NotImplementedError - - @abstractmethod - def iter(self) -> Iterator[bytes]: - """Iterate through all hashes depended by any tx or block.""" - raise NotImplementedError - - @abstractmethod - def _iter_needed_txs(self) -> Iterator[bytes]: - """Iterate through all txs that need to be downloaded, this is only used in tests, and thus is 'internal'.""" - raise NotImplementedError - - @abstractmethod - def known_children(self, tx: BaseTransaction) -> list[bytes]: - """Return the hashes of all reverse dependencies that are children of the given tx. - - That is, they depend on `tx` because they are children of `tx`, and not because `tx` is an input. This is - useful for pre-filling the children metadata, which would otherwise only be updated when - `update_initial_metadata` is called on the child-tx. 
- """ - raise NotImplementedError - - @abstractmethod - def has_needed_tx(self) -> bool: - """Whether there is any tx on the needed tx index.""" - raise NotImplementedError - - @abstractmethod - def is_tx_needed(self, tx: bytes) -> bool: - """Whether a tx is in the requested tx list.""" - raise NotImplementedError - - @abstractmethod - def remove_from_needed_index(self, tx: bytes) -> None: - """Remove tx from needed txs index, tx doesn't need to be in the index.""" - raise NotImplementedError - - @abstractmethod - def iter_next_needed_txs(self) -> Iterator[bytes]: - """Iterate over the next needed transactions.""" - raise NotImplementedError diff --git a/hathor/indexes/manager.py b/hathor/indexes/manager.py index bc37f2339..967ba7225 100644 --- a/hathor/indexes/manager.py +++ b/hathor/indexes/manager.py @@ -21,7 +21,6 @@ from hathor.indexes.address_index import AddressIndex from hathor.indexes.base_index import BaseIndex -from hathor.indexes.deps_index import DepsIndex from hathor.indexes.height_index import HeightIndex from hathor.indexes.info_index import InfoIndex from hathor.indexes.mempool_tips_index import MempoolTipsIndex @@ -61,7 +60,6 @@ class IndexesManager(ABC): sorted_txs: TimestampIndex height: HeightIndex - deps: Optional[DepsIndex] mempool_tips: Optional[MempoolTipsIndex] addresses: Optional[AddressIndex] tokens: Optional[TokensIndex] @@ -90,7 +88,6 @@ def iter_all_indexes(self) -> Iterator[BaseIndex]: self.sorted_blocks, self.sorted_txs, self.height, - self.deps, self.mempool_tips, self.addresses, self.tokens, @@ -112,11 +109,6 @@ def enable_utxo_index(self) -> None: """Enable UTXO index. It does nothing if it has already been enabled.""" raise NotImplementedError - @abstractmethod - def enable_deps_index(self) -> None: - """Enable dependencies index. It does nothing if it has already been enabled.""" - raise NotImplementedError - @abstractmethod def enable_mempool_index(self) -> None: """Enable mempool index. 
It does nothing if it has already been enabled.""" @@ -194,8 +186,6 @@ def update(self, tx: BaseTransaction) -> None: """ # XXX: this _should_ be here, but it breaks some tests, for now this is done explicitly in hathor.manager # self.mempool_tips.update(tx) - if self.deps: - self.deps.update(tx) if self.utxo: self.utxo.update(tx) @@ -226,10 +216,6 @@ def add_tx(self, tx: BaseTransaction) -> bool: if self.tokens: self.tokens.add_tx(tx) - # XXX: this method is idempotent and has no result - if self.deps: - self.deps.add_tx(tx) - # We need to check r1 as well to make sure we don't count twice the transactions/blocks that are # just changing from voided to executed or vice-versa if r1 and r3: @@ -272,10 +258,6 @@ def del_tx(self, tx: BaseTransaction, *, remove_all: bool = False, relax_assert: if self.tokens: self.tokens.del_tx(tx) - # XXX: this method is idempotent and has no result - if self.deps: - self.deps.del_tx(tx) - class MemoryIndexesManager(IndexesManager): def __init__(self) -> None: @@ -298,7 +280,6 @@ def __init__(self) -> None: self.utxo = None self.height = MemoryHeightIndex() self.mempool_tips = None - self.deps = None # XXX: this has to be at the end of __init__, after everything has been initialized self.__init_checks__() @@ -323,11 +304,6 @@ def enable_mempool_index(self) -> None: if self.mempool_tips is None: self.mempool_tips = MemoryMempoolTipsIndex() - def enable_deps_index(self) -> None: - from hathor.indexes.memory_deps_index import MemoryDepsIndex - if self.deps is None: - self.deps = MemoryDepsIndex() - class RocksDBIndexesManager(IndexesManager): def __init__(self, rocksdb_storage: 'RocksDBStorage') -> None: @@ -352,7 +328,6 @@ def __init__(self, rocksdb_storage: 'RocksDBStorage') -> None: self.tokens = None self.utxo = None self.mempool_tips = None - self.deps = None # XXX: this has to be at the end of __init__, after everything has been initialized self.__init_checks__() @@ -377,9 +352,3 @@ def enable_mempool_index(self) -> None: if 
self.mempool_tips is None: # XXX: use of RocksDBMempoolTipsIndex is very slow and was suspended self.mempool_tips = MemoryMempoolTipsIndex() - - def enable_deps_index(self) -> None: - from hathor.indexes.memory_deps_index import MemoryDepsIndex - if self.deps is None: - # XXX: use of RocksDBDepsIndex is currently suspended until it is fixed - self.deps = MemoryDepsIndex() diff --git a/hathor/indexes/memory_deps_index.py b/hathor/indexes/memory_deps_index.py deleted file mode 100644 index 8d9d74a9b..000000000 --- a/hathor/indexes/memory_deps_index.py +++ /dev/null @@ -1,175 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from typing import TYPE_CHECKING, Iterator, Optional - -from structlog import get_logger - -from hathor.indexes.deps_index import DepsIndex, get_requested_from_height -from hathor.transaction import BaseTransaction -from hathor.util import not_none - -if TYPE_CHECKING: # pragma: no cover - from hathor.transaction.storage import TransactionStorage - -logger = get_logger() - - -class MemoryDepsIndex(DepsIndex): - # Reverse dependency mapping - _rev_dep_index: dict[bytes, set[bytes]] - - # Ready to be validated cache - _txs_with_deps_ready: set[bytes] - - # Next to be downloaded - # - Key: hash of the tx to be downloaded - # - Value[0]: height - # - Value[1]: hash of the tx waiting for the download - _needed_txs_index: dict[bytes, tuple[int, bytes]] - - def __init__(self): - self.log = logger.new() - self.force_clear() - - def get_db_name(self) -> Optional[str]: - return None - - def force_clear(self) -> None: - self._rev_dep_index = {} - self._txs_with_deps_ready = set() - self._needed_txs_index = {} - - def add_tx(self, tx: BaseTransaction, partial: bool = True) -> None: - validation = tx.get_metadata().validation - if validation.is_fully_connected(): - # discover if new txs are ready because of this tx - self._update_new_deps_ready(tx) - # finally remove from rev deps - self._del_from_deps_index(tx) - elif not partial: - raise ValueError('partial=False will only accept fully connected transactions') - else: - self._add_deps(tx) - self._add_needed(tx) - - def del_tx(self, tx: BaseTransaction) -> None: - self._del_from_deps_index(tx) - - def _update_new_deps_ready(self, tx: BaseTransaction) -> None: - """Go over the reverse dependencies of tx and check if any of them are now ready to be validated. - - This is also idempotent. 
- """ - assert tx.hash is not None - assert tx.storage is not None - for candidate_hash in self._rev_dep_index.get(tx.hash, []): - with tx.storage.allow_partially_validated_context(): - candidate_tx = tx.storage.get_transaction(candidate_hash) - if candidate_tx.is_ready_for_validation(): - self._txs_with_deps_ready.add(candidate_hash) - - def _add_deps(self, tx: BaseTransaction) -> None: - """This method is idempotent, because self.update needs it to be indempotent.""" - assert tx.hash is not None - for dep in tx.get_all_dependencies(): - if dep not in self._rev_dep_index: - self._rev_dep_index[dep] = set() - self._rev_dep_index[dep].add(tx.hash) - - def _del_from_deps_index(self, tx: BaseTransaction) -> None: - """This method is idempotent, because self.update needs it to be indempotent.""" - assert tx.hash is not None - for dep in tx.get_all_dependencies(): - if dep not in self._rev_dep_index: - continue - rev_deps = self._rev_dep_index[dep] - if tx.hash in rev_deps: - rev_deps.remove(tx.hash) - if not rev_deps: - del self._rev_dep_index[dep] - - def remove_ready_for_validation(self, tx: bytes) -> None: - """ Removes from ready for validation set. 
- """ - self._txs_with_deps_ready.discard(tx) - - def next_ready_for_validation(self, tx_storage: 'TransactionStorage', *, dry_run: bool = False) -> Iterator[bytes]: - if dry_run: - cur_ready = self._txs_with_deps_ready.copy() - else: - cur_ready, self._txs_with_deps_ready = self._txs_with_deps_ready, set() - while cur_ready: - with tx_storage.allow_partially_validated_context(): - sorted_cur_ready = sorted(cur_ready, key=lambda tx_hash: tx_storage.get_transaction(tx_hash).timestamp) - yield from sorted_cur_ready - if dry_run: - cur_ready = self._txs_with_deps_ready - cur_ready - else: - cur_ready, self._txs_with_deps_ready = self._txs_with_deps_ready, set() - - def iter(self) -> Iterator[bytes]: - yield from self._rev_dep_index.keys() - - def _iter_needed_txs(self) -> Iterator[bytes]: - yield from self._needed_txs_index.keys() - - def _get_rev_deps(self, tx: bytes) -> frozenset[bytes]: - """Get all txs that depend on the given tx (i.e. its reverse depdendencies).""" - return frozenset(self._rev_dep_index.get(tx, set())) - - def known_children(self, tx: BaseTransaction) -> list[bytes]: - assert tx.hash is not None - assert tx.storage is not None - with tx.storage.allow_partially_validated_context(): - it_rev_deps = map(tx.storage.get_transaction, self._get_rev_deps(tx.hash)) - return [not_none(rev.hash) for rev in it_rev_deps if tx.hash in rev.parents] - - # needed-txs-index methods: - - def has_needed_tx(self) -> bool: - return bool(self._needed_txs_index) - - def is_tx_needed(self, tx: bytes) -> bool: - return tx in self._needed_txs_index - - def remove_from_needed_index(self, tx: bytes) -> None: - self._needed_txs_index.pop(tx, None) - - def iter_next_needed_txs(self) -> Iterator[bytes]: - for tx_hash, _ in self._needed_txs_index.items(): - yield tx_hash - - def _add_needed(self, tx: BaseTransaction) -> None: - """This method is idempotent, because self.update needs it to be indempotent.""" - assert tx.hash is not None - assert tx.storage is not None - 
tx_storage = tx.storage - - height = get_requested_from_height(tx) - self.log.debug('add needed deps', tx=tx.hash_hex, height=height, type=type(tx).__name__) - # get_all_dependencies is needed to ensure that we get the inputs that aren't reachable through parents alone, - # this can happen for inputs that have not been confirmed as of the block the confirms the block or transaction - # that we're adding the dependencies of - for dep_hash in tx.get_all_dependencies(): - # It may happen that we have one of the dependencies already, so just add the ones we don't - # have. We should add at least one dependency, otherwise this tx should be full validated - with tx_storage.allow_partially_validated_context(): - tx_exists = tx_storage.transaction_exists(dep_hash) - if not tx_exists: - self.log.debug('tx parent is needed', tx=dep_hash.hex()) - self._needed_txs_index[dep_hash] = (height, not_none(tx.hash)) - - # also, remove the given transaction from needed, because we already have it - self._needed_txs_index.pop(tx.hash, None) diff --git a/hathor/indexes/rocksdb_deps_index.py b/hathor/indexes/rocksdb_deps_index.py deleted file mode 100644 index 780299ee8..000000000 --- a/hathor/indexes/rocksdb_deps_index.py +++ /dev/null @@ -1,356 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from dataclasses import dataclass -from enum import Enum -from typing import TYPE_CHECKING, Iterator, Optional - -from structlog import get_logger - -from hathor.indexes.deps_index import DepsIndex, get_requested_from_height -from hathor.indexes.rocksdb_utils import RocksDBIndexUtils, incr_key -from hathor.transaction import BaseTransaction -from hathor.util import not_none - -if TYPE_CHECKING: - import rocksdb - - from hathor.transaction.storage import TransactionStorage - -logger = get_logger() - -_CF_NAME_DEPS_INDEX = b'deps-index' -_DB_NAME: str = 'deps' - - -class _Tag(Enum): - READY = 0x01 - REVERSE = 0x02 - NEEDED = 0x03 - - -@dataclass -class _KeyAny: - tag: _Tag - tx_hash: Optional[bytes] - tx_dep_hash: Optional[bytes] - - -class RocksDBDepsIndex(DepsIndex, RocksDBIndexUtils): - """ Index of dependencies between transactions - - Terms: - - - ready: [tx], many tx, all of which are ready to be validated because their dependencies are fully-valid - - reverse: tx_dep -> [tx], tx_dep is needed by many tx, in order for them to be validated - - needed: [tx_dep], many tx_dep, all of which need to be downloaded (we store which tx asked for a tx_dep) - - This index uses the following key-value formats: - - key_ready = [tag=01][tx.hash] value='' - |--1b--||--32b--| - - key_reverse = [tag=02][tx_dep.hash][tx.hash] value='' - |--1b--||--32b------||--32b--| - - key_needed = [tag=03][tx_dep.hash] value=[height][tx.hash] - |--1b--||--32b------| |--4b--||--32b--| - - It works nicely because rocksdb uses a tree sorted by key under the hood. 
- """ - - def __init__(self, db: 'rocksdb.DB', *, cf_name: Optional[bytes] = None, _force: bool = False) -> None: - if not _force: - # See: https://github.com/HathorNetwork/hathor-core/issues/412 - raise TypeError('This class should not be used') - self.log = logger.new() - RocksDBIndexUtils.__init__(self, db, cf_name or _CF_NAME_DEPS_INDEX) - - def get_db_name(self) -> Optional[str]: - # XXX: we don't need it to be parametrizable, so this is fine - return _DB_NAME - - def force_clear(self) -> None: - self.clear() - - def _to_key_ready(self, tx_hash: Optional[bytes] = None) -> bytes: - """Make a key for accessing READY txs 'set'""" - key = bytearray() - key.append(_Tag.READY.value) - if tx_hash is None: - assert len(key) == 1 - return bytes(key) - key.extend(tx_hash) - assert len(key) == 1 + 32 - return bytes(key) - - def _to_key_reverse(self, tx_dep_hash: Optional[bytes] = None, tx_hash: Optional[bytes] = None) -> bytes: - """Make a key for accessing REVERSE dependencies 'dict'""" - key = bytearray() - key.append(_Tag.REVERSE.value) - if tx_dep_hash is None: - assert tx_hash is None - assert len(key) == 1 - return bytes(key) - key.extend(tx_dep_hash) - if tx_hash is None: - assert len(key) == 1 + 32 - return bytes(key) - key.extend(tx_hash) - assert len(key) == 1 + 32 + 32 - return bytes(key) - - def _to_key_needed(self, tx_dep_hash: Optional[bytes] = None) -> bytes: - """Make a key for accessing NEEDED txs 'dict'""" - key = bytearray() - key.append(_Tag.NEEDED.value) - if tx_dep_hash is None: - assert len(key) == 1 - return bytes(key) - key.extend(tx_dep_hash) - assert len(key) == 1 + 32 - return bytes(key) - - def _from_key_any(self, key: bytes) -> _KeyAny: - """Parse any key on the column-family, the returned object has a tag that determines the key type.""" - assert len(key) >= 1 - tag = _Tag(key[0]) - if tag is _Tag.READY: - assert len(key) == 1 + 32 - tx_hash = key[1:] - assert len(tx_hash) == 32 - return _KeyAny(tag, tx_hash, None) - elif tag is 
_Tag.REVERSE: - assert len(key) == 1 + 32 + 32 - tx_dep_hash = key[1:33] - tx_hash = key[33:] - assert len(tx_hash) == len(tx_dep_hash) == 32 - return _KeyAny(tag, tx_hash, tx_dep_hash) - elif tag is _Tag.NEEDED: - assert len(key) == 1 + 32 - tx_dep_hash = key[1:] - assert len(tx_dep_hash) == 32 - return _KeyAny(tag, None, tx_dep_hash) - else: - # XXX: if/elif is exhaustive for all possible tags and invalid tag value will fail sooner - raise NotImplementedError('unreachable') - - def _to_value_needed(self, height: int, tx_hash: bytes) -> bytes: - import struct - value = bytearray() - value.extend(struct.pack('!I', height)) - value.extend(tx_hash) - assert len(value) == 4 + 32 - return bytes(value) - - def _from_value_needed(self, value: bytes) -> tuple[int, bytes]: - import struct - assert len(value) == 4 + 32 - height, = struct.unpack('!I', value[:4]) - tx_hash = value[4:] - return height, tx_hash - - def _iter_rev_deps_of(self, tx_dep_hash: bytes) -> Iterator[bytes]: - it = self._db.iterkeys(self._cf) - seek_key = self._to_key_reverse(tx_dep_hash) - self.log.debug('seek to start', seek_key=seek_key.hex()) - it.seek(seek_key) - for _, key in it: - key_any = self._from_key_any(key) - if key_any.tag is not _Tag.REVERSE: - break - if key_any.tx_dep_hash != tx_dep_hash: - break - tx_hash = key_any.tx_hash - assert tx_hash is not None - self.log.debug('seek found', tx=tx_hash.hex()) - yield tx_hash - self.log.debug('seek end') - - def _del_from_deps(self, tx: BaseTransaction, batch: 'rocksdb.WriteBatch') -> None: - assert tx.hash is not None - for tx_dep_hash in tx.get_all_dependencies(): - batch.delete((self._cf, self._to_key_reverse(tx_dep_hash, tx.hash))) - - def _add_ready(self, tx_hash: bytes, batch: 'rocksdb.WriteBatch') -> None: - key = self._to_key_ready(tx_hash) - batch.put((self._cf, key), b'') - - def add_tx(self, tx: BaseTransaction, partial: bool = True) -> None: - import rocksdb - assert tx.hash is not None - assert tx.storage is not None - batch = 
rocksdb.WriteBatch() - validation = tx.get_metadata().validation - if validation.is_fully_connected(): - # discover if new txs are ready because of this tx - self._update_new_deps_ready(tx, batch) - # finally remove from rev deps - self._del_from_deps(tx, batch) - elif not partial: - raise ValueError('partial=False will only accept fully connected transactions') - else: - self._add_deps(tx, batch) - self._add_needed(tx, batch) - self._db.write(batch) - - def del_tx(self, tx: BaseTransaction) -> None: - import rocksdb - batch = rocksdb.WriteBatch() - self._del_from_deps(tx, batch) - self._db.write(batch) - - def _update_new_deps_ready(self, tx: BaseTransaction, batch: 'rocksdb.WriteBatch') -> None: - """Go over the reverse dependencies of tx and check if any of them are now ready to be validated. - - This is also idempotent. - """ - assert tx.hash is not None - assert tx.storage is not None - for candidate_hash in self._iter_rev_deps_of(tx.hash): - candidate_tx = tx.storage.get_transaction(candidate_hash) - if candidate_tx.is_ready_for_validation(): - self._add_ready(candidate_hash, batch) - - def _add_deps(self, tx: BaseTransaction, batch: 'rocksdb.WriteBatch') -> None: - assert tx.hash is not None - for dep in tx.get_all_dependencies(): - batch.put((self._cf, self._to_key_reverse(dep, tx.hash)), b'') - - def _add_needed(self, tx: BaseTransaction, batch: 'rocksdb.WriteBatch') -> None: - assert tx.hash is not None - assert tx.storage is not None - tx_storage = tx.storage - - height = get_requested_from_height(tx) - self.log.debug('add needed deps', tx=tx.hash_hex, height=height, type=type(tx).__name__) - # get_all_dependencies is needed to ensure that we get the inputs that aren't reachable through parents alone, - # this can happen for inputs that have not been confirmed as of the block the confirms the block or transaction - # that we're adding the dependencies of - for tx_dep_hash in tx.get_all_dependencies(): - # It may happen that we have one of the 
dependencies already, so just add the ones we don't have. We should - # add at least one dependency, otherwise this tx should be full validated - if not tx_storage.transaction_exists(tx_dep_hash): - self.log.debug('tx parent is needed', tx=tx.hash.hex(), tx_dep=tx_dep_hash.hex()) - batch.put((self._cf, self._to_key_needed(tx_dep_hash)), self._to_value_needed(height, tx.hash)) - - # also, remove the given transaction from needed, because we already have it - batch.delete((self._cf, self._to_key_needed(tx.hash))) - - def remove_ready_for_validation(self, tx: bytes) -> None: - self._db.delete((self._cf, self._to_key_ready(tx))) - - def next_ready_for_validation(self, tx_storage: 'TransactionStorage', *, dry_run: bool = False) -> Iterator[bytes]: - import rocksdb - batch = rocksdb.WriteBatch() - ready = self._drain_all_sorted_ready(tx_storage, batch) - if not dry_run: - self._db.write(batch) - while ready: - yield from ready - batch = rocksdb.WriteBatch() - ready = self._drain_all_sorted_ready(tx_storage, batch) - if not dry_run: - self._db.write(batch) - - def _drain_all_sorted_ready(self, tx_storage: 'TransactionStorage', batch: 'rocksdb.WriteBatch') -> list[bytes]: - ready = list(self._drain_all_ready(tx_storage, batch)) - ready.sort(key=lambda tx_hash: tx_storage.get_transaction(tx_hash).timestamp) - return ready - - def _drain_all_ready(self, tx_storage: 'TransactionStorage', batch: 'rocksdb.WriteBatch') -> Iterator[bytes]: - it = self._db.iterkeys(self._cf) - seek_key = self._to_key_ready() - self.log.debug('seek to start', seek_key=seek_key.hex()) - it.seek(seek_key) - for _, key in it: - key_any = self._from_key_any(key) - if key_any.tag is not _Tag.READY: - break - tx_hash = key_any.tx_hash - assert tx_hash is not None - self.log.debug('seek found', tx=tx_hash.hex()) - batch.delete((self._cf, key)) - yield tx_hash - self.log.debug('seek end') - - def iter(self) -> Iterator[bytes]: - yield from self._iter_has_rev_deps() - - def _iter_needed_txs(self) -> 
Iterator[bytes]: - yield from (tx for tx, _, __ in self._iter_needed()) - - def _iter_has_rev_deps(self) -> Iterator[bytes]: - it = self._db.iterkeys(self._cf) - seek_key = self._to_key_reverse() - self.log.debug('seek to start', seek_key=seek_key.hex()) - it.seek(seek_key) - for _, key in it: - key_any = self._from_key_any(key) - if key_any.tag is not _Tag.REVERSE: - break - tx_dep_hash = key_any.tx_dep_hash - assert tx_dep_hash is not None - self.log.debug('seek found', tx_dep=tx_dep_hash.hex()) - yield tx_dep_hash - # XXX: do this seek to skip directly to the next tx_dep_hash, otherwise we would have to iterate until the - # found key has a different tx_dep_hash - # XXX: also this assumes rocksdb skip will be faster than calling next repeatedly, an investigation should - # be made to confirm this - seek_key = incr_key(self._to_key_reverse(tx_dep_hash)) - it.seek(seek_key) - self.log.debug('seek end') - - def known_children(self, tx: BaseTransaction) -> list[bytes]: - assert tx.hash is not None - assert tx.storage is not None - it_rev_deps = map(tx.storage.get_transaction, self._get_rev_deps(tx.hash)) - return [not_none(rev.hash) for rev in it_rev_deps if tx.hash in rev.parents] - - def _get_rev_deps(self, tx: bytes) -> frozenset[bytes]: - """Get all txs that depend on the given tx (i.e. 
its reverse depdendencies).""" - return frozenset(self._iter_rev_deps_of(tx)) - - def has_needed_tx(self) -> bool: - return any(self._iter_needed()) - - def _iter_needed(self) -> Iterator[tuple[bytes, int, bytes]]: - """Iterate over needed txs items, which is a tuple of (tx_dep_hash, height, tx_requested_hash)""" - it = self._db.iteritems(self._cf) - seek_key = self._to_key_needed() - self.log.debug('seek to start', seek_key=seek_key.hex()) - it.seek(seek_key) - for (_, key), value in it: - key_any = self._from_key_any(key) - if key_any.tag is not _Tag.NEEDED: - break - tx_dep_hash = key_any.tx_dep_hash - assert tx_dep_hash is not None - height, tx_hash = self._from_value_needed(value) - self.log.debug('seek found', tx_dep=tx_dep_hash.hex()) - yield tx_dep_hash, height, tx_hash - self.log.debug('seek end') - - def is_tx_needed(self, tx: bytes) -> bool: - key_needed = self._to_key_needed(tx) - val = self._db.get((self._cf, key_needed)) - return val is not None - - def remove_from_needed_index(self, tx: bytes) -> None: - key_needed = self._to_key_needed(tx) - self._db.delete((self._cf, key_needed)) - - def iter_next_needed_txs(self) -> Iterator[bytes]: - for tx_hash, _, __ in self._iter_needed(): - yield tx_hash diff --git a/hathor/manager.py b/hathor/manager.py index 630c1b54c..43963478a 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -16,7 +16,7 @@ import sys import time from enum import Enum -from typing import Any, Iterable, Iterator, NamedTuple, Optional, Union +from typing import Any, Iterator, NamedTuple, Optional, Union from hathorlib.base_transaction import tx_or_block_from_bytes as lib_tx_or_block_from_bytes from structlog import get_logger @@ -25,12 +25,13 @@ from twisted.internet.task import LoopingCall from twisted.python.threadpool import ThreadPool -from hathor import daa from hathor.checkpoint import Checkpoint from hathor.conf.settings import HathorSettings from hathor.consensus import ConsensusAlgorithm +from hathor.daa import 
DifficultyAdjustmentAlgorithm from hathor.event.event_manager import EventManager from hathor.exception import ( + BlockTemplateTimestampError, DoubleSpendingError, HathorError, InitializationError, @@ -43,11 +44,13 @@ from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService from hathor.mining import BlockTemplate, BlockTemplates +from hathor.mining.cpu_mining_service import CpuMiningService from hathor.p2p.manager import ConnectionsManager from hathor.p2p.peer_id import PeerId from hathor.p2p.protocol import HathorProtocol from hathor.profiler import get_cpu_profiler from hathor.pubsub import HathorEvents, PubSubManager +from hathor.reactor import ReactorProtocol as Reactor from hathor.stratum import StratumFactory from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion, sum_weights from hathor.transaction.exceptions import TxValidationError @@ -55,7 +58,7 @@ from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.transaction.storage.tx_allow_scope import TxAllowScope from hathor.types import Address, VertexId -from hathor.util import EnvironmentInfo, LogDuration, Random, Reactor, calculate_min_significant_weight, not_none +from hathor.util import EnvironmentInfo, LogDuration, Random, calculate_min_significant_weight, not_none from hathor.verification.verification_service import VerificationService from hathor.wallet import BaseWallet @@ -89,6 +92,7 @@ def __init__(self, settings: HathorSettings, pubsub: PubSubManager, consensus_algorithm: ConsensusAlgorithm, + daa: DifficultyAdjustmentAlgorithm, peer_id: PeerId, tx_storage: TransactionStorage, p2p_manager: ConnectionsManager, @@ -96,6 +100,7 @@ def __init__(self, feature_service: FeatureService, bit_signaling_service: BitSignalingService, verification_service: VerificationService, + cpu_mining_service: CpuMiningService, network: str, hostname: Optional[str] = None, wallet: 
Optional[BaseWallet] = None, @@ -123,6 +128,7 @@ def __init__(self, ) self._settings = settings + self.daa = daa self._cmd_path: Optional[str] = None self.log = logger.new() @@ -173,6 +179,7 @@ def __init__(self, self._feature_service = feature_service self._bit_signaling_service = bit_signaling_service self.verification_service = verification_service + self.cpu_mining_service = cpu_mining_service self.consensus_algorithm = consensus_algorithm @@ -268,8 +275,6 @@ def start(self) -> None: if self._enable_event_queue: self._event_manager.start(not_none(self.my_peer.id)) - self._bit_signaling_service.start() - self.state = self.NodeState.INITIALIZING self.pubsub.publish(HathorEvents.MANAGER_ON_START) self._event_manager.load_started() @@ -451,8 +456,6 @@ def _initialize_components_full_verification(self) -> None: self.tx_storage.indexes.update(tx) if self.tx_storage.indexes.mempool_tips is not None: self.tx_storage.indexes.mempool_tips.update(tx) # XXX: move to indexes.update - if self.tx_storage.indexes.deps is not None: - self.sync_v2_step_validations([tx], quiet=True) self.tx_storage.save_transaction(tx, only_metadata=True) else: assert self.verification_service.validate_basic( @@ -509,10 +512,6 @@ def _initialize_components_full_verification(self) -> None: self.log.error('Error initializing the node. 
Checkpoint validation error.') sys.exit() - # restart all validations possible - if self.tx_storage.indexes.deps: - self._sync_v2_resume_validations() - best_height = self.tx_storage.get_height_best_block() if best_height != h: self.log.warn('best height doesn\'t match', best_height=best_height, max_height=h) @@ -545,6 +544,8 @@ def _initialize_components_new(self) -> None: self.tx_storage.pre_init() assert self.tx_storage.indexes is not None + self._bit_signaling_service.start() + started_at = int(time.time()) last_started_at = self.tx_storage.get_last_started_at() if last_started_at >= started_at: @@ -567,10 +568,6 @@ def _initialize_components_new(self) -> None: self.log.exception('Initialization error when checking checkpoints, cannot continue.') sys.exit() - # restart all validations possible - if self.tx_storage.indexes.deps is not None: - self._sync_v2_resume_validations() - # XXX: last step before actually starting is updating the last started at timestamps self.tx_storage.update_last_started_at(started_at) @@ -662,24 +659,6 @@ def _verify_checkpoints(self) -> None: f'hash {tx_hash.hex()} was found' ) - def _sync_v2_resume_validations(self) -> None: - """ This method will resume running validations that did not run because the node exited. 
- """ - assert self.tx_storage.indexes is not None - assert self.tx_storage.indexes.deps is not None - if self.tx_storage.indexes.deps.has_needed_tx(): - self.log.debug('run pending validations') - depended_final_txs: list[BaseTransaction] = [] - for tx_hash in self.tx_storage.indexes.deps.iter(): - if not self.tx_storage.transaction_exists(tx_hash): - continue - with self.tx_storage.allow_partially_validated_context(): - tx = self.tx_storage.get_transaction(tx_hash) - if tx.get_metadata().validation.is_final(): - depended_final_txs.append(tx) - self.sync_v2_step_validations(depended_final_txs, quiet=True) - self.log.debug('pending validations finished') - def get_new_tx_parents(self, timestamp: Optional[float] = None) -> list[VertexId]: """Select which transactions will be confirmed by a new transaction. @@ -755,7 +734,7 @@ def make_block_template(self, parent_block_hash: VertexId, timestamp: Optional[i assert isinstance(parent_block, Block) parent_txs = self.generate_parent_txs(parent_block.timestamp + self._settings.MAX_DISTANCE_BETWEEN_BLOCKS) if timestamp is None: - current_timestamp = int(max(self.tx_storage.latest_timestamp, self.reactor.seconds())) + current_timestamp = int(self.reactor.seconds()) else: current_timestamp = timestamp return self._make_block_template(parent_block, parent_txs, current_timestamp) @@ -807,6 +786,13 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur timestamp_max = min(timestamp_abs_max, timestamp_max_decay) else: timestamp_max = timestamp_abs_max + timestamp_max = min(timestamp_max, int(current_timestamp + self._settings.MAX_FUTURE_TIMESTAMP_ALLOWED)) + if timestamp_max < timestamp_min: + raise BlockTemplateTimestampError( + f'Unable to create a block template because there is no timestamp available. 
' + f'(min={timestamp_min}, max={timestamp_max}) ' + f'(current_timestamp={current_timestamp})' + ) timestamp = min(max(current_timestamp, timestamp_min), timestamp_max) parent_block_metadata = parent_block.get_metadata() # this is the min weight to cause an increase of twice the WEIGHT_TOL, we make sure to generate a template with @@ -816,7 +802,7 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur parent_block_metadata.score, 2 * self._settings.WEIGHT_TOL ) - weight = max(daa.calculate_next_weight(parent_block, timestamp), min_significant_weight) + weight = max(self.daa.calculate_next_weight(parent_block, timestamp), min_significant_weight) height = parent_block.get_height() + 1 parents = [parent_block.hash] + parent_txs.must_include parents_any = parent_txs.can_include @@ -830,7 +816,7 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur assert len(parents_any) == 0, 'Extra parents to choose from that cannot be chosen' return BlockTemplate( versions={TxVersion.REGULAR_BLOCK.value, TxVersion.MERGE_MINED_BLOCK.value}, - reward=daa.get_tokens_issued_per_block(height), + reward=self.daa.get_tokens_issued_per_block(height), weight=weight, timestamp_now=current_timestamp, timestamp_min=timestamp_min, @@ -867,7 +853,7 @@ def generate_mining_block(self, timestamp: Optional[int] = None, def get_tokens_issued_per_block(self, height: int) -> int: """Return the number of tokens issued (aka reward) per block of a given height.""" - return daa.get_tokens_issued_per_block(height) + return self.daa.get_tokens_issued_per_block(height) def submit_block(self, blk: Block, fails_silently: bool = True) -> bool: """Used by submit block from all mining APIs. @@ -916,9 +902,10 @@ def push_tx(self, tx: Transaction, allow_non_standard_script: bool = False, raise NonStandardTxError('Transaction is non standard.') # Validate tx. 
- success, message = self.verification_service.validate_vertex_error(tx) - if not success: - raise InvalidNewTransaction(message) + try: + self.verification_service.verify(tx) + except TxValidationError as e: + raise InvalidNewTransaction(str(e)) self.propagate_tx(tx, fails_silently=False) @@ -1050,37 +1037,6 @@ def log_new_object(self, tx: BaseTransaction, message_fmt: str, *, quiet: bool) log_func = self.log.debug log_func(message, **kwargs) - def sync_v2_step_validations(self, txs: Iterable[BaseTransaction], *, quiet: bool) -> None: - """ Step all validations until none can be stepped anymore. - """ - assert self.tx_storage.indexes is not None - assert self.tx_storage.indexes.deps is not None - # cur_txs will be empty when there are no more new txs that reached full - # validation because of an initial trigger - for ready_tx in txs: - assert ready_tx.hash is not None - self.tx_storage.indexes.deps.remove_ready_for_validation(ready_tx.hash) - with self.tx_storage.allow_partially_validated_context(): - for tx in map(self.tx_storage.get_transaction, - self.tx_storage.indexes.deps.next_ready_for_validation(self.tx_storage)): - assert tx.hash is not None - tx.update_initial_metadata() - with self.tx_storage.allow_only_valid_context(): - try: - # XXX: `reject_locked_reward` might not apply, partial validation is only used on sync-v2 - # TODO: deal with `reject_locked_reward` on sync-v2 - assert self.verification_service.validate_full(tx, reject_locked_reward=False) - except (AssertionError, HathorError): - # TODO - raise - else: - self.tx_storage.add_to_indexes(tx) - self.consensus_algorithm.update(tx) - self.tx_storage.indexes.update(tx) - if self.tx_storage.indexes.mempool_tips: - self.tx_storage.indexes.mempool_tips.update(tx) # XXX: move to indexes.update - self.tx_fully_validated(tx, quiet=quiet) - def tx_fully_validated(self, tx: BaseTransaction, *, quiet: bool) -> None: """ Handle operations that need to happen once the tx becomes fully validated. 
@@ -1120,7 +1076,7 @@ def _log_feature_states(self, vertex: BaseTransaction) -> None: features_states=state_by_feature ) - features = [Feature.NOP_FEATURE_1, Feature.NOP_FEATURE_2, Feature.NOP_FEATURE_3] + features = [Feature.NOP_FEATURE_4, Feature.NOP_FEATURE_5, Feature.NOP_FEATURE_6] for feature in features: self._log_if_feature_is_active(vertex, feature) @@ -1165,7 +1121,8 @@ def has_recent_activity(self) -> bool: return True - def is_healthy(self) -> tuple[bool, Optional[str]]: + def is_sync_healthy(self) -> tuple[bool, Optional[str]]: + # This checks whether the last txs (blocks or transactions) we received are recent enough. if not self.has_recent_activity(): return False, HathorManager.UnhealthinessReason.NO_RECENT_ACTIVITY diff --git a/hathor/metrics.py b/hathor/metrics.py index 2c27955be..64ee2bd08 100644 --- a/hathor/metrics.py +++ b/hathor/metrics.py @@ -22,11 +22,11 @@ from hathor.conf import HathorSettings from hathor.p2p.manager import ConnectionsManager, PeerConnectionsMetrics from hathor.pubsub import EventArguments, HathorEvents, PubSubManager +from hathor.reactor import ReactorProtocol as Reactor from hathor.transaction.base_transaction import sum_weights from hathor.transaction.block import Block from hathor.transaction.storage import TransactionRocksDBStorage, TransactionStorage from hathor.transaction.storage.cache_storage import TransactionCacheStorage -from hathor.util import Reactor if TYPE_CHECKING: from hathor.stratum import StratumFactory # noqa: F401 @@ -63,7 +63,7 @@ class Metrics: connections: ConnectionsManager tx_storage: TransactionStorage # Twisted reactor that handles the time and callLater - reactor: Optional[Reactor] = None + reactor: Reactor # Transactions count in the network transactions: int = 0 @@ -127,10 +127,6 @@ def __post_init__(self) -> None: # Stores caculated block weights saved in tx storage self.weight_block_deque: deque[WeightValue] = deque(maxlen=self.weight_block_deque_len) - if self.reactor is None: - from 
hathor.util import reactor as twisted_reactor - self.reactor = twisted_reactor - # A timer to periodically collect data self._lc_collect_data = LoopingCall(self._collect_data) self._lc_collect_data.clock = self.reactor diff --git a/hathor/mining/cpu_mining_service.py b/hathor/mining/cpu_mining_service.py new file mode 100644 index 000000000..fb84bb405 --- /dev/null +++ b/hathor/mining/cpu_mining_service.py @@ -0,0 +1,91 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time +from typing import Callable, Optional + +from hathor.transaction import BaseTransaction +from hathor.transaction.token_creation_tx import TokenCreationTransaction +from hathor.types import VertexId + +MAX_NONCE = 2**32 + + +class CpuMiningService: + def resolve(self, vertex: BaseTransaction, *, update_time: bool = False) -> bool: + """Run a CPU mining looking for the nonce that solves the proof-of-work + + The `vertex.weight` must be set before calling this method. 
+ + :param update_time: update timestamp every 2 seconds + :return: True if a solution was found + :rtype: bool + """ + hash_bytes = self.start_mining(vertex, update_time=update_time) + + if hash_bytes: + vertex.hash = hash_bytes + metadata = getattr(vertex, '_metadata', None) + if metadata is not None and metadata.hash is not None: + metadata.hash = hash_bytes + + if isinstance(vertex, TokenCreationTransaction): + vertex.tokens = [vertex.hash] + + return True + else: + return False + + @staticmethod + def start_mining( + vertex: BaseTransaction, + *, + start: int = 0, + end: int = MAX_NONCE, + sleep_seconds: float = 0.0, + update_time: bool = True, + should_stop: Callable[[], bool] = lambda: False + ) -> Optional[VertexId]: + """Starts mining until it solves the problem, i.e., finds the nonce that satisfies the conditions + + :param start: beginning of the search interval + :param end: end of the search interval + :param sleep_seconds: the number of seconds it will sleep after each attempt + :param update_time: update timestamp every 2 seconds + :return The hash of the solved PoW or None when it is not found + """ + pow_part1 = vertex.calculate_hash1() + target = vertex.get_target() + vertex.nonce = start + last_time = time.time() + while vertex.nonce < end: + if update_time: + now = time.time() + if now - last_time > 2: + if should_stop(): + return None + vertex.timestamp = int(now) + pow_part1 = vertex.calculate_hash1() + last_time = now + vertex.nonce = start + + result = vertex.calculate_hash2(pow_part1.copy()) + if int(result.hex(), vertex.HEX_BASE) < target: + return result + vertex.nonce += 1 + if sleep_seconds > 0: + time.sleep(sleep_seconds) + if should_stop(): + return None + return None diff --git a/hathor/p2p/manager.py b/hathor/p2p/manager.py index 7aefaee3f..f7c60b1bb 100644 --- a/hathor/p2p/manager.py +++ b/hathor/p2p/manager.py @@ -34,8 +34,9 @@ from hathor.p2p.sync_version import SyncVersion from hathor.p2p.utils import 
description_to_connection_string, parse_whitelist from hathor.pubsub import HathorEvents, PubSubManager +from hathor.reactor import ReactorProtocol as Reactor from hathor.transaction import BaseTransaction -from hathor.util import Random, Reactor +from hathor.util import Random if TYPE_CHECKING: from twisted.internet.interfaces import IDelayedCall @@ -86,6 +87,7 @@ class GlobalRateLimiter: handshaking_peers: set[HathorProtocol] whitelist_only: bool _sync_factories: dict[SyncVersion, SyncAgentFactory] + _enabled_sync_versions: set[SyncVersion] rate_limiter: RateLimiter @@ -96,17 +98,7 @@ def __init__(self, pubsub: PubSubManager, ssl: bool, rng: Random, - whitelist_only: bool, - enable_sync_v1: bool, - enable_sync_v2: bool, - enable_sync_v1_1: bool) -> None: - from hathor.p2p.sync_v1.factory_v1_0 import SyncV10Factory - from hathor.p2p.sync_v1.factory_v1_1 import SyncV11Factory - from hathor.p2p.sync_v2.factory import SyncV2Factory - - if not (enable_sync_v1 or enable_sync_v1_1 or enable_sync_v2): - raise TypeError(f'{type(self).__name__}() at least one sync version is required') - + whitelist_only: bool) -> None: self.log = logger.new() self.rng = rng self.manager = None @@ -186,30 +178,62 @@ def __init__(self, # Parameter to explicitly enable whitelist-only mode, when False it will still check the whitelist for sync-v1 self.whitelist_only = whitelist_only - self.enable_sync_v1 = enable_sync_v1 - self.enable_sync_v1_1 = enable_sync_v1_1 - self.enable_sync_v2 = enable_sync_v2 - # Timestamp when the last discovery ran self._last_discovery: float = 0. 
 # sync-manager factories self._sync_factories = {} - if enable_sync_v1: - self._sync_factories[SyncVersion.V1] = SyncV10Factory(self) - if enable_sync_v1_1: - self._sync_factories[SyncVersion.V1_1] = SyncV11Factory(self) - if enable_sync_v2: - self._sync_factories[SyncVersion.V2] = SyncV2Factory(self) + self._enabled_sync_versions = set() + + def add_sync_factory(self, sync_version: SyncVersion, sync_factory: SyncAgentFactory) -> None: + """Add factory for the given sync version, must use a sync version that does not already exist.""" + # XXX: to allow code in `set_manager` to safely use the available sync versions, we add this restriction: + assert self.manager is None, 'Cannot modify sync factories after a manager is set' + if sync_version in self._sync_factories: + raise ValueError('sync version already exists') + self._sync_factories[sync_version] = sync_factory + + def get_available_sync_versions(self) -> set[SyncVersion]: + """What sync versions the manager is capable of using, they are not necessarily enabled.""" + return set(self._sync_factories.keys()) + + def is_sync_version_available(self, sync_version: SyncVersion) -> bool: + """Whether the given sync version is available for use, is not necessarily enabled.""" + return sync_version in self._sync_factories + + def get_enabled_sync_versions(self) -> set[SyncVersion]: + """What sync versions are enabled for use, it is necessarily a subset of the available versions.""" + return self._enabled_sync_versions.copy() + + def is_sync_version_enabled(self, sync_version: SyncVersion) -> bool: + """Whether the given sync version is enabled for use, being enabled implies being available.""" + return sync_version in self._enabled_sync_versions + + def enable_sync_version(self, sync_version: SyncVersion) -> None: + """Enable using the given sync version on new connections, it must be available before being enabled.""" + assert sync_version in self._sync_factories + if sync_version in self._enabled_sync_versions: + 
self.log.info('tried to enable a sync version that was already enabled, nothing to do') + return + self._enabled_sync_versions.add(sync_version) + + def disable_sync_version(self, sync_version: SyncVersion) -> None: + """Disable using the given sync version, it WILL NOT close connections using the given version.""" + if sync_version not in self._enabled_sync_versions: + self.log.info('tried to disable a sync version that was already disabled, nothing to do') + return + self._enabled_sync_versions.discard(sync_version) def set_manager(self, manager: 'HathorManager') -> None: """Set the manager. This method must be called before start().""" + if len(self._enabled_sync_versions) == 0: + raise TypeError('Class built incorrectly without any enabled sync version') + self.manager = manager - if self.enable_sync_v2: + if self.is_sync_version_available(SyncVersion.V2): assert self.manager.tx_storage.indexes is not None indexes = self.manager.tx_storage.indexes self.log.debug('enable sync-v2 indexes') - indexes.enable_deps_index() indexes.enable_mempool_index() def add_listen_address(self, addr: str) -> None: @@ -225,7 +249,8 @@ def do_discovery(self) -> None: Do a discovery and connect on all discovery strategies. 
""" for peer_discovery in self.peer_discoveries: - peer_discovery.discover_and_connect(self.connect_to) + coro = peer_discovery.discover_and_connect(self.connect_to) + Deferred.fromCoroutine(coro) def disable_rate_limiter(self) -> None: """Disable global rate limiter.""" @@ -241,6 +266,10 @@ def enable_rate_limiter(self, max_hits: int = 16, window_seconds: float = 1) -> ) def start(self) -> None: + """Listen on the given address descriptions and start accepting and processing connections.""" + if self.manager is None: + raise TypeError('Class was built incorrectly without a HathorManager.') + self.lc_reconnect.start(5, now=False) self.lc_sync_update.start(self.lc_sync_update_interval, now=False) @@ -284,19 +313,9 @@ def _get_peers_count(self) -> PeerConnectionsMetrics: len(self.peer_storage) ) - def get_sync_versions(self) -> set[SyncVersion]: - """Set of versions that were enabled and are supported.""" - assert self.manager is not None - if self.manager.has_sync_version_capability(): - return set(self._sync_factories.keys()) - else: - assert SyncVersion.V1 in self._sync_factories, 'sync-versions capability disabled, but sync-v1 not enabled' - # XXX: this is to make it easy to simulate old behavior if we disable the sync-version capability - return {SyncVersion.V1} - def get_sync_factory(self, sync_version: SyncVersion) -> SyncAgentFactory: - """Get the sync factory for a given version, support MUST be checked beforehand or it will raise an assert.""" - assert sync_version in self._sync_factories, 'get_sync_factory must be called for a supported version' + """Get the sync factory for a given version, MUST be available or it will raise an assert.""" + assert sync_version in self._sync_factories, f'sync_version {sync_version} is not available' return self._sync_factories[sync_version] def has_synced_peer(self) -> bool: diff --git a/hathor/p2p/messages.py b/hathor/p2p/messages.py index 91b20d8e6..8ee219403 100644 --- a/hathor/p2p/messages.py +++ 
b/hathor/p2p/messages.py @@ -120,3 +120,4 @@ class ProtocolMessages(Enum): PEER_BLOCK_HASHES = 'PEER-BLOCK-HASHES' STOP_BLOCK_STREAMING = 'STOP-BLOCK-STREAMING' + STOP_TRANSACTIONS_STREAMING = 'STOP-TRANSACTIONS-STREAMING' diff --git a/hathor/p2p/netfilter/matches_remote.py b/hathor/p2p/netfilter/matches_remote.py index 79b011e20..734c4ea13 100644 --- a/hathor/p2p/netfilter/matches_remote.py +++ b/hathor/p2p/netfilter/matches_remote.py @@ -20,7 +20,7 @@ from twisted.internet.task import LoopingCall from hathor.p2p.netfilter.matches import NetfilterMatch, NetfilterMatchIPAddress -from hathor.util import Reactor +from hathor.reactor import ReactorProtocol as Reactor if TYPE_CHECKING: from hathor.p2p.netfilter.context import NetfilterContext diff --git a/hathor/p2p/peer_discovery.py b/hathor/p2p/peer_discovery.py index 8730b7ecb..a202f6409 100644 --- a/hathor/p2p/peer_discovery.py +++ b/hathor/p2p/peer_discovery.py @@ -14,13 +14,13 @@ import socket from abc import ABC, abstractmethod -from typing import Any, Callable, Generator +from typing import Callable from structlog import get_logger from twisted.internet import defer -from twisted.internet.defer import inlineCallbacks from twisted.names.client import lookupAddress, lookupText from twisted.names.dns import Record_A, Record_TXT, RRHeader +from typing_extensions import override logger = get_logger() @@ -30,7 +30,7 @@ class PeerDiscovery(ABC): """ @abstractmethod - def discover_and_connect(self, connect_to: Callable[[str], None]) -> Any: + async def discover_and_connect(self, connect_to: Callable[[str], None]) -> None: """ This method must discover the peers and call `connect_to` for each of them. :param connect_to: Function which will be called for each discovered peer. 
@@ -51,7 +51,8 @@ def __init__(self, descriptions: list[str]): self.log = logger.new() self.descriptions = descriptions - def discover_and_connect(self, connect_to: Callable[[str], None]) -> Any: + @override + async def discover_and_connect(self, connect_to: Callable[[str], None]) -> None: for description in self.descriptions: connect_to(description) @@ -70,18 +71,17 @@ def __init__(self, hosts: list[str], default_port: int = 40403, test_mode: int = self.default_port = default_port self.test_mode = test_mode - @inlineCallbacks - def discover_and_connect(self, connect_to: Callable[[str], None]) -> Generator[Any, Any, None]: + @override + async def discover_and_connect(self, connect_to: Callable[[str], None]) -> None: """ Run DNS lookup for host and connect to it This is executed when starting the DNS Peer Discovery and first connecting to the network """ for host in self.hosts: - url_list = yield self.dns_seed_lookup(host) + url_list = await self.dns_seed_lookup(host) for url in url_list: connect_to(url) - @inlineCallbacks - def dns_seed_lookup(self, host: str) -> Generator[Any, Any, list[str]]: + async def dns_seed_lookup(self, host: str) -> list[str]: """ Run a DNS lookup for TXT, A, and AAAA records and return a list of connection strings. 
""" if self.test_mode: @@ -97,7 +97,7 @@ def dns_seed_lookup(self, host: str) -> Generator[Any, Any, list[str]]: d2.addErrback(self.errback), d = defer.gatherResults([d1, d2]) - results = yield d + results = await d unique_urls: set[str] = set() for urls in results: unique_urls.update(urls) diff --git a/hathor/p2p/peer_id.py b/hathor/p2p/peer_id.py index 532502ab8..f3122c34f 100644 --- a/hathor/p2p/peer_id.py +++ b/hathor/p2p/peer_id.py @@ -16,7 +16,7 @@ import hashlib from enum import Enum from math import inf -from typing import TYPE_CHECKING, Any, Generator, Optional, cast +from typing import TYPE_CHECKING, Any, Optional, cast from cryptography import x509 from cryptography.exceptions import InvalidSignature @@ -24,13 +24,13 @@ from cryptography.hazmat.primitives import hashes, serialization from cryptography.hazmat.primitives.asymmetric import padding, rsa from OpenSSL.crypto import X509, PKey -from twisted.internet.defer import inlineCallbacks from twisted.internet.interfaces import ISSLTransport from twisted.internet.ssl import Certificate, CertificateOptions, TLSVersion, trustRootFromCertificates -from hathor import daa from hathor.conf.get_settings import get_settings +from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.p2p.utils import connection_string_to_host, discover_dns, generate_certificate +from hathor.util import not_none if TYPE_CHECKING: from hathor.p2p.protocol import HathorProtocol # noqa: F401 @@ -323,8 +323,7 @@ def _get_certificate_options(self) -> CertificateOptions: ) return certificate_options - @inlineCallbacks - def validate_entrypoint(self, protocol: 'HathorProtocol') -> Generator[Any, Any, bool]: + async def validate_entrypoint(self, protocol: 'HathorProtocol') -> bool: """ Validates if connection entrypoint is one of the peer entrypoints """ found_entrypoint = False @@ -347,7 +346,8 @@ def validate_entrypoint(self, protocol: 'HathorProtocol') -> Generator[Any, Any, break host = connection_string_to_host(entrypoint) # 
TODO: don't use `daa.TEST_MODE` for this - result = yield discover_dns(host, daa.TEST_MODE) + test_mode = not_none(DifficultyAdjustmentAlgorithm.singleton).TEST_MODE + result = await discover_dns(host, test_mode) if protocol.connection_string in result: # Found the entrypoint found_entrypoint = True @@ -366,7 +366,8 @@ def validate_entrypoint(self, protocol: 'HathorProtocol') -> Generator[Any, Any, if connection_host == host: found_entrypoint = True break - result = yield discover_dns(host, daa.TEST_MODE) + test_mode = not_none(DifficultyAdjustmentAlgorithm.singleton).TEST_MODE + result = await discover_dns(host, test_mode) if connection_host in [connection_string_to_host(x) for x in result]: # Found the entrypoint found_entrypoint = True diff --git a/hathor/p2p/protocol.py b/hathor/p2p/protocol.py index 3df296466..696ba3c07 100644 --- a/hathor/p2p/protocol.py +++ b/hathor/p2p/protocol.py @@ -14,7 +14,7 @@ import time from enum import Enum -from typing import TYPE_CHECKING, Any, Generator, Optional, cast +from typing import TYPE_CHECKING, Any, Coroutine, Generator, Optional, cast from structlog import get_logger from twisted.internet.defer import Deferred @@ -311,7 +311,8 @@ def recv_message(self, cmd: ProtocolMessages, payload: str) -> Optional[Deferred fn = self.state.cmd_map.get(cmd) if fn is not None: try: - return fn(payload) + result = fn(payload) + return Deferred.fromCoroutine(result) if isinstance(result, Coroutine) else result except Exception: self.log.warn('recv_message processing error', exc_info=True) raise diff --git a/hathor/p2p/rate_limiter.py b/hathor/p2p/rate_limiter.py index f341ffb91..defbd9342 100644 --- a/hathor/p2p/rate_limiter.py +++ b/hathor/p2p/rate_limiter.py @@ -14,7 +14,7 @@ from typing import NamedTuple, Optional -from hathor.util import Reactor +from hathor.reactor import ReactorProtocol as Reactor class RateLimiterLimit(NamedTuple): @@ -32,12 +32,9 @@ class RateLimiter: # Stores the last hit for each key hits: dict[str, 
RateLimiterLimit] - def __init__(self, reactor: Optional[Reactor] = None): + def __init__(self, reactor: Reactor): self.keys = {} self.hits = {} - if reactor is None: - from hathor.util import reactor as twisted_reactor - reactor = twisted_reactor self.reactor = reactor def set_limit(self, key: str, max_hits: int, window_seconds: float) -> None: diff --git a/hathor/p2p/resources/healthcheck.py b/hathor/p2p/resources/healthcheck.py index 3e1c1e368..a87182b8c 100644 --- a/hathor/p2p/resources/healthcheck.py +++ b/hathor/p2p/resources/healthcheck.py @@ -17,7 +17,7 @@ def render_GET(self, request): :rtype: string (json) """ - healthy, reason = self.manager.is_healthy() + healthy, reason = self.manager.is_sync_healthy() if not healthy: request.setResponseCode(503) diff --git a/hathor/p2p/resources/mining_info.py b/hathor/p2p/resources/mining_info.py index 4aae45616..8263ee273 100644 --- a/hathor/p2p/resources/mining_info.py +++ b/hathor/p2p/resources/mining_info.py @@ -17,7 +17,6 @@ from hathor.api_util import Resource, set_cors from hathor.cli.openapi_files.register import register_resource from hathor.conf.get_settings import get_settings -from hathor.daa import get_mined_tokens from hathor.difficulty import Weight from hathor.util import json_dumpb @@ -57,7 +56,7 @@ def render_GET(self, request): parent = block.get_block_parent() hashrate = 2**(parent.weight - log(30, 2)) - mined_tokens = get_mined_tokens(height) + mined_tokens = self.manager.daa.get_mined_tokens(height) data = { 'hashrate': hashrate, diff --git a/hathor/p2p/states/base.py b/hathor/p2p/states/base.py index ee07bc931..abbc17dd0 100644 --- a/hathor/p2p/states/base.py +++ b/hathor/p2p/states/base.py @@ -12,7 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TYPE_CHECKING, Callable, Optional, Union +from collections.abc import Coroutine +from typing import TYPE_CHECKING, Any, Callable, Optional from structlog import get_logger from twisted.internet.defer import Deferred @@ -27,7 +28,10 @@ class BaseState: protocol: 'HathorProtocol' - cmd_map: dict[ProtocolMessages, Union[Callable[[str], None], Callable[[str], Deferred[None]]]] + cmd_map: dict[ + ProtocolMessages, + Callable[[str], None] | Callable[[str], Deferred[None]] | Callable[[str], Coroutine[Deferred[None], Any, None]] + ] def __init__(self, protocol: 'HathorProtocol'): self.log = logger.new(**protocol.get_logger_context()) diff --git a/hathor/p2p/states/hello.py b/hathor/p2p/states/hello.py index d731e2bfa..56f514dd7 100644 --- a/hathor/p2p/states/hello.py +++ b/hathor/p2p/states/hello.py @@ -64,10 +64,10 @@ def _get_hello_data(self) -> dict[str, Any]: return data def _get_sync_versions(self) -> set[SyncVersion]: - """Shortcut to ConnectionManager.get_sync_versions""" + """Shortcut to ConnectionManager.get_enabled_sync_versions""" connections_manager = self.protocol.connections assert connections_manager is not None - return connections_manager.get_sync_versions() + return connections_manager.get_enabled_sync_versions() def on_enter(self) -> None: # After a connection is made, we just send a HELLO message. @@ -181,4 +181,4 @@ def _parse_sync_versions(hello_data: dict[str, Any]) -> set[SyncVersion]: return set(SyncVersion(x) for x in recognized_values) else: # XXX: implied value when sync-version capability isn't present - return {SyncVersion.V1} + return {SyncVersion.V1_1} diff --git a/hathor/p2p/states/peer_id.py b/hathor/p2p/states/peer_id.py index 9b91b5b62..b2e1f0a50 100644 --- a/hathor/p2p/states/peer_id.py +++ b/hathor/p2p/states/peer_id.py @@ -12,10 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TYPE_CHECKING, Any, Generator +from typing import TYPE_CHECKING from structlog import get_logger -from twisted.internet.defer import inlineCallbacks from hathor.conf import HathorSettings from hathor.p2p.messages import ProtocolMessages @@ -77,8 +76,7 @@ def send_peer_id(self) -> None: } self.send_message(ProtocolMessages.PEER_ID, json_dumps(hello)) - @inlineCallbacks - def handle_peer_id(self, payload: str) -> Generator[Any, Any, None]: + async def handle_peer_id(self, payload: str) -> None: """ Executed when a PEER-ID is received. It basically checks the identity of the peer. Only after this step, the peer connection is considered established and ready to communicate. @@ -117,7 +115,7 @@ def handle_peer_id(self, payload: str) -> Generator[Any, Any, None]: protocol.send_error_and_close_connection('We are already connected.') return - entrypoint_valid = yield peer.validate_entrypoint(protocol) + entrypoint_valid = await peer.validate_entrypoint(protocol) if not entrypoint_valid: protocol.send_error_and_close_connection('Connection string is not in the entrypoints.') return diff --git a/hathor/p2p/sync_factory.py b/hathor/p2p/sync_factory.py index 4f04a734b..f4883f21a 100644 --- a/hathor/p2p/sync_factory.py +++ b/hathor/p2p/sync_factory.py @@ -13,10 +13,10 @@ # limitations under the License. 
from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from hathor.p2p.sync_agent import SyncAgent -from hathor.util import Reactor +from hathor.reactor import ReactorProtocol as Reactor if TYPE_CHECKING: from hathor.p2p.protocol import HathorProtocol @@ -24,5 +24,5 @@ class SyncAgentFactory(ABC): @abstractmethod - def create_sync_agent(self, protocol: 'HathorProtocol', reactor: Optional[Reactor] = None) -> SyncAgent: + def create_sync_agent(self, protocol: 'HathorProtocol', reactor: Reactor) -> SyncAgent: pass diff --git a/hathor/p2p/sync_v1/agent.py b/hathor/p2p/sync_v1/agent.py index 8a53fd962..cb300907c 100644 --- a/hathor/p2p/sync_v1/agent.py +++ b/hathor/p2p/sync_v1/agent.py @@ -14,25 +14,23 @@ import base64 import struct -from collections import OrderedDict from math import inf from typing import TYPE_CHECKING, Any, Callable, Generator, Iterator, Optional from weakref import WeakSet from structlog import get_logger from twisted.internet.defer import Deferred, inlineCallbacks -from twisted.internet.interfaces import IConsumer, IDelayedCall, IPushProducer -from zope.interface import implementer +from twisted.internet.interfaces import IDelayedCall from hathor.conf.get_settings import get_settings from hathor.p2p.messages import GetNextPayload, GetTipsPayload, NextPayload, ProtocolMessages, TipsPayload from hathor.p2p.sync_agent import SyncAgent from hathor.p2p.sync_v1.downloader import Downloader +from hathor.reactor import ReactorProtocol as Reactor from hathor.transaction import BaseTransaction from hathor.transaction.base_transaction import tx_or_block_from_bytes from hathor.transaction.storage.exceptions import TransactionDoesNotExist -from hathor.util import Reactor, json_dumps, json_loads -from hathor.utils.zope import asserted_cast +from hathor.util import json_dumps, json_loads logger = get_logger() @@ -52,126 +50,6 @@ def _get_deps(tx: BaseTransaction) -> Iterator[bytes]: yield txin.tx_id 
-@implementer(IPushProducer) -class SendDataPush: - """ Prioritize blocks over transactions when pushing data to peers. - """ - def __init__(self, node_sync: 'NodeSyncTimestamp'): - self.node_sync = node_sync - self.protocol: 'HathorProtocol' = node_sync.protocol - assert self.protocol.transport is not None - consumer = asserted_cast(IConsumer, self.protocol.transport) - self.consumer = consumer - self.is_running: bool = False - self.is_producing: bool = False - - self.queue: OrderedDict[bytes, tuple[BaseTransaction, list[bytes]]] = OrderedDict() - self.priority_queue: OrderedDict[bytes, tuple[BaseTransaction, list[bytes]]] = OrderedDict() - - self.delayed_call: Optional[IDelayedCall] = None - - def start(self) -> None: - """ Start pushing data. - """ - if self.is_running: - raise Exception('SendDataPush is already started.') - self.is_running = True - self.consumer.registerProducer(self, True) - self.resumeProducing() - - def stop(self) -> None: - """ Stop pushing data. - """ - if not self.is_running: - raise Exception('SendDataPush is already stopped.') - self.is_running = False - self.pauseProducing() - self.consumer.unregisterProducer() - - def schedule_if_needed(self) -> None: - """ Schedule `send_next` if needed. - """ - if not self.is_running: - return - - if not self.is_producing: - return - - if self.delayed_call and self.delayed_call.active(): - return - - if len(self.queue) > 0 or len(self.priority_queue) > 0: - self.delayed_call = self.node_sync.reactor.callLater(0, self.send_next) - - def add(self, tx: BaseTransaction) -> None: - """ Add a new block/transaction to be pushed. - """ - assert tx.hash is not None - if tx.is_block: - self.add_to_priority(tx) - else: - deps = list(_get_deps(tx)) - self.queue[tx.hash] = (tx, deps) - self.schedule_if_needed() - - def add_to_priority(self, tx: BaseTransaction) -> None: - """ Add a new block/transaction to be pushed with priority. 
- """ - assert tx.hash is not None - assert tx.hash not in self.queue - if tx.hash in self.priority_queue: - return - deps = list(_get_deps(tx)) - for h in deps: - if h in self.queue: - tx2, _ = self.queue.pop(h) - self.add_to_priority(tx2) - self.priority_queue[tx.hash] = (tx, deps) - self.schedule_if_needed() - - def send_next(self) -> None: - """ Push next block/transaction to peer. - """ - assert self.is_running - assert self.is_producing - - if len(self.priority_queue) > 0: - # Send blocks first. - _, (tx, _) = self.priority_queue.popitem(last=False) - - elif len(self.queue) > 0: - # Otherwise, send in order. - _, (tx, _) = self.queue.popitem(last=False) - - else: - # Nothing to send. - self.delayed_call = None - return - - self.node_sync.send_data(tx) - self.schedule_if_needed() - - def resumeProducing(self) -> None: - """ This method is automatically called to resume pushing data. - """ - self.is_producing = True - self.schedule_if_needed() - - def pauseProducing(self) -> None: - """ This method is automatically called to pause pushing data. - """ - self.is_producing = False - if self.delayed_call and self.delayed_call.active(): - self.delayed_call.cancel() - - def stopProducing(self) -> None: - """ This method is automatically called to stop pushing data. - """ - self.pauseProducing() - self.queue.clear() - self.priority_queue.clear() - - class NodeSyncTimestamp(SyncAgent): """ An algorithm to sync the DAG between two peers using the timestamp of the transactions. @@ -182,7 +60,7 @@ class NodeSyncTimestamp(SyncAgent): MAX_HASHES: int = 40 - def __init__(self, protocol: 'HathorProtocol', downloader: Downloader, reactor: Optional[Reactor] = None) -> None: + def __init__(self, protocol: 'HathorProtocol', downloader: Downloader, reactor: Reactor) -> None: """ :param protocol: Protocol of the connection. 
:type protocol: HathorProtocol @@ -195,9 +73,6 @@ def __init__(self, protocol: 'HathorProtocol', downloader: Downloader, reactor: self.manager = protocol.node self.downloader = downloader - if reactor is None: - from hathor.util import reactor as twisted_reactor - reactor = twisted_reactor self.reactor: Reactor = reactor # Rate limit for this connection. @@ -218,8 +93,6 @@ def __init__(self, protocol: 'HathorProtocol', downloader: Downloader, reactor: # This number may decrease if a new transaction/block arrives in a timestamp smaller than it. self.synced_timestamp: int = 0 - self.send_data_queue: SendDataPush = SendDataPush(self) - # Latest data timestamp of the peer. self.previous_timestamp: int = 0 @@ -274,8 +147,6 @@ def start(self) -> None: if self._started: raise Exception('NodeSyncTimestamp is already running') self._started = True - if self.send_data_queue: - self.send_data_queue.start() self.next_step() def stop(self) -> None: @@ -284,8 +155,6 @@ def stop(self) -> None: if not self._started: raise Exception('NodeSyncTimestamp is already stopped') self._started = False - if self.send_data_queue and self.send_data_queue.is_running: - self.send_data_queue.stop() if self.call_later_id and self.call_later_id.active(): self.call_later_id.cancel() for call_later in self._send_tips_call_later: @@ -330,10 +199,7 @@ def send_tx_to_peer_if_possible(self, tx: BaseTransaction) -> None: if parent.timestamp > self.synced_timestamp: return - if self.send_data_queue: - self.send_data_queue.add(tx) - else: - self.send_data(tx) + self.send_data(tx) def get_peer_next(self, timestamp: Optional[int] = None, offset: int = 0) -> Deferred[NextPayload]: """ A helper that returns a deferred that is called when the peer replies. 
diff --git a/hathor/p2p/sync_v1/factory_v1_1.py b/hathor/p2p/sync_v1/factory.py similarity index 94% rename from hathor/p2p/sync_v1/factory_v1_1.py rename to hathor/p2p/sync_v1/factory.py index 57d8819ae..d6fa55deb 100644 --- a/hathor/p2p/sync_v1/factory_v1_1.py +++ b/hathor/p2p/sync_v1/factory.py @@ -19,7 +19,7 @@ from hathor.p2p.sync_factory import SyncAgentFactory from hathor.p2p.sync_v1.agent import NodeSyncTimestamp from hathor.p2p.sync_v1.downloader import Downloader -from hathor.util import Reactor +from hathor.reactor import ReactorProtocol as Reactor if TYPE_CHECKING: from hathor.p2p.protocol import HathorProtocol @@ -36,5 +36,5 @@ def get_downloader(self) -> Downloader: self._downloader = Downloader(self.connections.manager) return self._downloader - def create_sync_agent(self, protocol: 'HathorProtocol', reactor: Optional[Reactor] = None) -> SyncAgent: + def create_sync_agent(self, protocol: 'HathorProtocol', reactor: Reactor) -> SyncAgent: return NodeSyncTimestamp(protocol, downloader=self.get_downloader(), reactor=reactor) diff --git a/hathor/p2p/sync_v1/factory_v1_0.py b/hathor/p2p/sync_v1/factory_v1_0.py deleted file mode 100644 index acd430474..000000000 --- a/hathor/p2p/sync_v1/factory_v1_0.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from typing import TYPE_CHECKING, Optional - -from hathor.p2p.manager import ConnectionsManager -from hathor.p2p.sync_agent import SyncAgent -from hathor.p2p.sync_factory import SyncAgentFactory -from hathor.p2p.sync_v1.agent import NodeSyncTimestamp -from hathor.p2p.sync_v1.downloader import Downloader -from hathor.util import Reactor - -if TYPE_CHECKING: - from hathor.p2p.protocol import HathorProtocol - - -class SyncV10Factory(SyncAgentFactory): - def __init__(self, connections: ConnectionsManager): - self.connections = connections - self._downloader: Optional[Downloader] = None - - def get_downloader(self) -> Downloader: - if self._downloader is None: - assert self.connections.manager is not None - self._downloader = Downloader(self.connections.manager) - return self._downloader - - def create_sync_agent(self, protocol: 'HathorProtocol', reactor: Optional[Reactor] = None) -> SyncAgent: - return NodeSyncTimestamp(protocol, downloader=self.get_downloader(), reactor=reactor) diff --git a/hathor/p2p/sync_v2/agent.py b/hathor/p2p/sync_v2/agent.py index 1d62ee8fc..c93ee57eb 100644 --- a/hathor/p2p/sync_v2/agent.py +++ b/hathor/p2p/sync_v2/agent.py @@ -18,32 +18,58 @@ import struct from collections import OrderedDict from enum import Enum -from typing import TYPE_CHECKING, Any, Callable, Generator, Optional, cast +from typing import TYPE_CHECKING, Any, Callable, Generator, NamedTuple, Optional from structlog import get_logger from twisted.internet.defer import Deferred, inlineCallbacks -from twisted.internet.task import LoopingCall +from twisted.internet.task import LoopingCall, deferLater from hathor.conf.get_settings import get_settings from hathor.p2p.messages import ProtocolMessages from hathor.p2p.sync_agent import SyncAgent +from hathor.p2p.sync_v2.blockchain_streaming_client import BlockchainStreamingClient, StreamingError from hathor.p2p.sync_v2.mempool import SyncMempoolManager -from hathor.p2p.sync_v2.streamers import DEFAULT_STREAMING_LIMIT, 
BlockchainStreaming, StreamEnd, TransactionsStreaming +from hathor.p2p.sync_v2.payloads import BestBlockPayload, GetNextBlocksPayload, GetTransactionsBFSPayload +from hathor.p2p.sync_v2.streamers import ( + DEFAULT_STREAMING_LIMIT, + BlockchainStreamingServer, + StreamEnd, + TransactionsStreamingServer, +) +from hathor.p2p.sync_v2.transaction_streaming_client import TransactionStreamingClient +from hathor.reactor import ReactorProtocol as Reactor from hathor.transaction import BaseTransaction, Block, Transaction from hathor.transaction.base_transaction import tx_or_block_from_bytes -from hathor.transaction.exceptions import HathorError from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.types import VertexId -from hathor.util import Reactor, collect_n +from hathor.util import not_none if TYPE_CHECKING: from hathor.p2p.protocol import HathorProtocol + from hathor.transaction.storage import TransactionStorage logger = get_logger() MAX_GET_TRANSACTIONS_BFS_LEN: int = 8 +class _HeightInfo(NamedTuple): + height: int + id: VertexId + + def __repr__(self): + return f'_HeightInfo({self.height}, {self.id.hex()})' + + def __str__(self): + return f'({self.height}, {self.id.hex()})' + + def to_json(self) -> dict[str, Any]: + return { + 'height': self.height, + 'id': self.id.hex(), + } + + class PeerState(Enum): ERROR = 'error' UNKNOWN = 'unknown' @@ -57,7 +83,7 @@ class NodeBlockSync(SyncAgent): """ name: str = 'node-block-sync' - def __init__(self, protocol: 'HathorProtocol', reactor: Optional[Reactor] = None) -> None: + def __init__(self, protocol: 'HathorProtocol', reactor: Reactor) -> None: """ :param protocol: Protocol of the connection. 
:type protocol: HathorProtocol @@ -68,61 +94,47 @@ def __init__(self, protocol: 'HathorProtocol', reactor: Optional[Reactor] = None self._settings = get_settings() self.protocol = protocol self.manager = protocol.node - self.tx_storage = protocol.node.tx_storage + self.tx_storage: 'TransactionStorage' = protocol.node.tx_storage self.state = PeerState.UNKNOWN self.DEFAULT_STREAMING_LIMIT = DEFAULT_STREAMING_LIMIT - if reactor is None: - from hathor.util import reactor as twisted_reactor - reactor = twisted_reactor - assert reactor is not None self.reactor: Reactor = reactor self._is_streaming: bool = False # Create logger with context self.log = logger.new(peer=self.protocol.get_short_peer_id()) - # Extra - self._blk_size = 0 - self._blk_end_hash = self._settings.GENESIS_BLOCK_HASH - self._blk_max_quantity = 0 - # indicates whether we're receiving a stream from the peer self.receiving_stream = False # highest block where we are synced - self.synced_height = 0 + self.synced_block: Optional[_HeightInfo] = None # highest block peer has - self.peer_height = 0 + self.peer_best_block: Optional[_HeightInfo] = None # Latest deferred waiting for a reply. self._deferred_txs: dict[VertexId, Deferred[BaseTransaction]] = {} self._deferred_tips: Optional[Deferred[list[bytes]]] = None - self._deferred_best_block: Optional[Deferred[dict[str, Any]]] = None - self._deferred_peer_block_hashes: Optional[Deferred[list[tuple[int, bytes]]]] = None + self._deferred_best_block: Optional[Deferred[_HeightInfo]] = None + self._deferred_peer_block_hashes: Optional[Deferred[list[_HeightInfo]]] = None - # When syncing blocks we start streaming with all peers - # so the moment I get some repeated blocks, I stop the download - # because it's probably a streaming that I've just received - self.max_repeated_blocks = 10 + # Clients to handle streaming messages. 
+ self._blk_streaming_client: Optional[BlockchainStreamingClient] = None + self._tx_streaming_client: Optional[TransactionStreamingClient] = None - # Streaming objects - self.blockchain_streaming: Optional[BlockchainStreaming] = None - self.transactions_streaming: Optional[TransactionsStreaming] = None + # Streaming server objects + self._blk_streaming_server: Optional[BlockchainStreamingServer] = None + self._tx_streaming_server: Optional[TransactionsStreamingServer] = None - # Whether the peers are synced, i.e. our best height and best block are the same + # Whether the peers are synced, i.e. we have the same best block. + # Notice that this flag ignores the mempool. self._synced = False # Indicate whether the sync manager has been started. self._started: bool = False - # Saves the last received block from the block streaming # this is useful to be used when running the sync of - # transactions in the case when I am downloading a side chain. Starts at the genesis, which is common to all - # peers on the network - self._last_received_block: Optional[Block] = None - # Saves if I am in the middle of a mempool sync # we don't execute any sync while in the middle of it self.mempool_manager = SyncMempoolManager(self) @@ -136,13 +148,14 @@ def __init__(self, protocol: 'HathorProtocol', reactor: Optional[Reactor] = None self._lc_run = LoopingCall(self.run_sync) self._lc_run.clock = self.reactor self._is_running = False + self._sync_started_at: float = 0 + + # Maximum running time to consider a sync stale. 
+ self.max_running_time: int = 30 * 60 # seconds # Whether we propagate transactions or not self._is_relaying = False - # This stores the final height that we expect the last "get blocks" stream to end on - self._blk_end_height: Optional[int] = None - # Whether to sync with this peer self._is_enabled: bool = False @@ -151,8 +164,8 @@ def get_status(self) -> dict[str, Any]: """ res = { 'is_enabled': self.is_sync_enabled(), - 'peer_height': self.peer_height, - 'synced_height': self.synced_height, + 'peer_best_block': self.peer_best_block.to_json() if self.peer_best_block else None, + 'synced_block': self.synced_block.to_json() if self.synced_block else None, 'synced': self._synced, 'state': self.state.value, } @@ -177,9 +190,6 @@ def send_tx_to_peer_if_possible(self, tx: BaseTransaction) -> None: if not self._is_enabled: self.log.debug('sync is disabled') return - if not self.is_synced(): - # XXX Should we accept any tx while I am not synced? - return # XXX When we start having many txs/s this become a performance issue # Then we could change this to be a streaming of real time data with @@ -202,7 +212,8 @@ def stop(self) -> None: if not self._started: raise Exception('NodeSyncBlock is already stopped') self._started = False - self._lc_run.stop() + if self._lc_run.running: + self._lc_run.stop() def get_cmd_dict(self) -> dict[ProtocolMessages, Callable[[str], None]]: """ Return a dict of messages of the plugin. 
@@ -222,6 +233,7 @@ def get_cmd_dict(self) -> dict[ProtocolMessages, Callable[[str], None]]: ProtocolMessages.GET_PEER_BLOCK_HASHES: self.handle_get_peer_block_hashes, ProtocolMessages.PEER_BLOCK_HASHES: self.handle_peer_block_hashes, ProtocolMessages.STOP_BLOCK_STREAMING: self.handle_stop_block_streaming, + ProtocolMessages.STOP_TRANSACTIONS_STREAMING: self.handle_stop_transactions_streaming, ProtocolMessages.GET_TIPS: self.handle_get_tips, ProtocolMessages.TIPS: self.handle_tips, ProtocolMessages.TIPS_END: self.handle_tips_end, @@ -238,7 +250,7 @@ def handle_not_found(self, payload: str) -> None: """ # XXX: NOT_FOUND is a valid message, but we shouldn't ever receive it unless the other peer is running with a # modified code or if there is a bug - self.log.warn('not found? close connection', payload=payload) + self.log.warn('vertex not found? close connection', payload=payload) self.protocol.send_error_and_close_connection('Unexpected NOT_FOUND') def handle_error(self, payload: str) -> None: @@ -251,6 +263,16 @@ def handle_error(self, payload: str) -> None: def update_synced(self, synced: bool) -> None: self._synced = synced + def watchdog(self) -> None: + """Close connection if sync is stale.""" + if not self._is_running: + return + + dt = self.reactor.seconds() - self._sync_started_at + if dt > self.max_running_time: + self.log.warn('stale syncing detected, closing connection') + self.protocol.send_error_and_close_connection('stale syncing') + @inlineCallbacks def run_sync(self) -> Generator[Any, Any, None]: """ Async step of the sync algorithm. @@ -263,10 +285,15 @@ def run_sync(self) -> Generator[Any, Any, None]: if self._is_running: # Already running... 
self.log.debug('already running') + self.watchdog() return self._is_running = True + self._sync_started_at = self.reactor.seconds() try: yield self._run_sync() + except Exception: + self.protocol.send_error_and_close_connection('internal error') + self.log.error('unhandled exception', exc_info=True) finally: self._is_running = False @@ -274,103 +301,110 @@ def run_sync(self) -> Generator[Any, Any, None]: def _run_sync(self) -> Generator[Any, Any, None]: """ Actual implementation of the sync step logic in run_sync. """ - if self.receiving_stream: - # If we're receiving a stream, wait for it to finish before running sync. - # If we're sending a stream, do the sync to update the peer's synced block - self.log.debug('receiving stream, try again later') - return + assert not self.receiving_stream + assert not self.mempool_manager.is_running() + assert self.protocol.connections is not None - if self.mempool_manager.is_running(): - # It's running a mempool sync, so we wait until it finishes - self.log.debug('running mempool sync, try again later') - return + is_block_synced = yield self.run_sync_blocks() + if is_block_synced: + # our blocks are synced, so sync the mempool + self.state = PeerState.SYNCING_MEMPOOL + yield self.mempool_manager.run() + def get_my_best_block(self) -> _HeightInfo: + """Return my best block info.""" bestblock = self.tx_storage.get_best_block() + assert bestblock.hash is not None meta = bestblock.get_metadata() + assert meta.validation.is_fully_connected() + return _HeightInfo(height=bestblock.get_height(), id=bestblock.hash) - self.log.debug('run sync', height=meta.height) - - assert self.protocol.connections is not None - assert self.tx_storage.indexes is not None - assert self.tx_storage.indexes.deps is not None - - if self.tx_storage.indexes.deps.has_needed_tx(): - self.log.debug('needed tx exist, sync transactions') - self.update_synced(False) - # TODO: find out whether we can sync transactions from this peer to speed things up - 
self.run_sync_transactions() - else: - # I am already in sync with all checkpoints, sync next blocks - yield self.run_sync_blocks() + @inlineCallbacks + def run_sync_blocks(self) -> Generator[Any, Any, bool]: + """Async step of the block syncing phase. Return True if we already have all other peer's blocks. - def run_sync_transactions(self) -> None: - """ Run a step of the transaction syncing phase. + Notice that we might already have all other peer's blocks while the other peer is still syncing. """ - self.state = PeerState.SYNCING_TRANSACTIONS - - assert self.protocol.connections is not None assert self.tx_storage.indexes is not None - assert self.tx_storage.indexes.deps is not None - - # start_hash = self.tx_storage.indexes.deps.get_next_needed_tx() - needed_txs, _ = collect_n(self.tx_storage.indexes.deps.iter_next_needed_txs(), - MAX_GET_TRANSACTIONS_BFS_LEN) - - # Start with the last received block and find the best block full validated in its chain - block = self._last_received_block - if block is None: - block = cast(Block, self.tx_storage.get_genesis(self._settings.GENESIS_BLOCK_HASH)) - else: - with self.tx_storage.allow_partially_validated_context(): - while not block.get_metadata().validation.is_valid(): - block = block.get_block_parent() - assert block is not None - assert block.hash is not None - block_height = block.get_height() + self.state = PeerState.SYNCING_BLOCKS - self.log.info('run sync transactions', start=[i.hex() for i in needed_txs], end_block_hash=block.hash.hex(), - end_block_height=block_height) - self.send_get_transactions_bfs(needed_txs, block.hash) + # Get my best block. + my_best_block = self.get_my_best_block() + + # Get peer's best block + self.peer_best_block = yield self.get_peer_best_block() + assert self.peer_best_block is not None + + # Are we synced? + if self.peer_best_block == my_best_block: + # Yes, we are synced! 
\o/ + if not self.is_synced(): + self.log.info('blocks are synced', best_block=my_best_block) + self.update_synced(True) + self.send_relay(enable=True) + self.synced_block = self.peer_best_block + return True + + # Not synced but same blockchain? + if self.peer_best_block.height <= my_best_block.height: + # Is peer behind me at the same blockchain? + common_block_hash = self.tx_storage.indexes.height.get(self.peer_best_block.height) + if common_block_hash == self.peer_best_block.id: + # If yes, nothing to sync from this peer. + if not self.is_synced(): + self.log.info('nothing to sync because peer is behind me at the same best blockchain', + my_best_block=my_best_block, peer_best_block=self.peer_best_block) + self.update_synced(True) + self.send_relay(enable=True) + self.synced_block = self.peer_best_block + return True + + # Ok. We have blocks to sync. + self.update_synced(False) + self.send_relay(enable=False) + + # Find best common block + self.synced_block = yield self.find_best_common_block(my_best_block, self.peer_best_block) + if self.synced_block is None: + # Find best common block failed. Try again soon. + # This might happen if a reorg occurs during the search. 
+ self.log.debug('find_best_common_block failed.') + return False + + self.log.debug('starting to sync blocks', + my_best_block=my_best_block, + peer_best_block=self.peer_best_block, + synced_block=self.synced_block) + + # Sync from common block + try: + yield self.start_blockchain_streaming(self.synced_block, + self.peer_best_block) + except StreamingError as e: + self.log.info('block streaming failed', reason=repr(e)) + self.send_stop_block_streaming() + self.receiving_stream = False + return False + + assert self._blk_streaming_client is not None + partial_blocks = self._blk_streaming_client._partial_blocks + if partial_blocks: + self.state = PeerState.SYNCING_TRANSACTIONS + try: + reason = yield self.start_transactions_streaming(partial_blocks) + except StreamingError as e: + self.log.info('tx streaming failed', reason=repr(e)) + self.send_stop_transactions_streaming() + self.receiving_stream = False + return False - @inlineCallbacks - def run_sync_blocks(self) -> Generator[Any, Any, None]: - """ Async step of the block syncing phase. 
- """ - assert self.tx_storage.indexes is not None - self.state = PeerState.SYNCING_BLOCKS + self.log.info('tx streaming finished', reason=reason) + while reason == StreamEnd.LIMIT_EXCEEDED: + reason = yield self.resume_transactions_streaming() - # Find my height - bestblock = self.tx_storage.get_best_block() - assert bestblock.hash is not None - meta = bestblock.get_metadata() - my_height = meta.height - - self.log.debug('run sync blocks', my_height=my_height) - - # Find best block - data = yield self.get_peer_best_block() - peer_best_block = data['block'] - peer_best_height = data['height'] - self.peer_height = peer_best_height - - # find best common block - yield self.find_best_common_block(peer_best_height, peer_best_block) - self.log.debug('run_sync_blocks', peer_height=self.peer_height, synced_height=self.synced_height) - - if self.synced_height < self.peer_height: - # sync from common block - peer_block_at_height = yield self.get_peer_block_hashes([self.synced_height]) - self.run_block_sync(peer_block_at_height[0][1], self.synced_height, peer_best_block, peer_best_height) - elif my_height == self.synced_height == self.peer_height: - # we're synced and on the same height, get their mempool - self.state = PeerState.SYNCING_MEMPOOL - self.mempool_manager.run() - elif self._is_relaying: - # TODO: validate if this is when we should disable relaying - self.send_relay(enable=False) - else: - # we got all the peer's blocks but aren't on the same height, nothing to do - pass + self._blk_streaming_client = None + self._tx_streaming_client = None + return False def get_tips(self) -> Deferred[list[bytes]]: """ Async method to request the remote peer's tips. 
@@ -400,8 +434,9 @@ def handle_get_tips(self, _payload: str) -> None: return self.log.debug('handle_get_tips') # TODO Use a streaming of tips - for txid in self.tx_storage.indexes.mempool_tips.get(): - self.send_tips(txid) + for tx_id in self.tx_storage.indexes.mempool_tips.get(): + self.send_tips(tx_id) + self.log.debug('tips end') self.send_message(ProtocolMessages.TIPS_END) def send_tips(self, tx_id: bytes) -> None: @@ -453,32 +488,25 @@ def handle_relay(self, payload: str) -> None: self.protocol.send_error_and_close_connection('RELAY: invalid value') return - def _setup_block_streaming(self, start_hash: bytes, start_height: int, end_hash: bytes, end_height: int, - reverse: bool) -> None: - """ Common setup before starting an outgoing block stream. - """ - self._blk_start_hash = start_hash - self._blk_start_height = start_height - self._blk_end_hash = end_hash - self._blk_end_height = end_height - self._blk_received = 0 - self._blk_repeated = 0 - raw_quantity = end_height - start_height + 1 - self._blk_max_quantity = -raw_quantity if reverse else raw_quantity - self._blk_prev_hash: Optional[bytes] = None - self._blk_stream_reverse = reverse - self._last_received_block = None - - def run_block_sync(self, start_hash: bytes, start_height: int, end_hash: bytes, end_height: int) -> None: - """ Called when the bestblock is after all checkpoints. - - It must syncs to the left until it reaches the remote's best block or the max stream limit. 
- """ - self._setup_block_streaming(start_hash, start_height, end_hash, end_height, False) - quantity = end_height - start_height - self.log.info('get next blocks', start_height=start_height, end_height=end_height, quantity=quantity, - start_hash=start_hash.hex(), end_hash=end_hash.hex()) - self.send_get_next_blocks(start_hash, end_hash) + def start_blockchain_streaming(self, + start_block: _HeightInfo, + end_block: _HeightInfo) -> Deferred[StreamEnd]: + """Request peer to start streaming blocks to us.""" + self._blk_streaming_client = BlockchainStreamingClient(self, start_block, end_block) + quantity = self._blk_streaming_client._blk_max_quantity + self.log.info('requesting blocks streaming', + start_block=start_block, + end_block=end_block, + quantity=quantity) + self.send_get_next_blocks(start_block.id, end_block.id, quantity) + return self._blk_streaming_client.wait() + + def stop_blk_streaming_server(self, response_code: StreamEnd) -> None: + """Stop blockchain streaming server.""" + assert self._blk_streaming_server is not None + self._blk_streaming_server.stop() + self._blk_streaming_server = None + self.send_blocks_end(response_code) def send_message(self, cmd: ProtocolMessages, payload: Optional[str] = None) -> None: """ Helper to send a message. @@ -493,68 +521,73 @@ def partial_vertex_exists(self, vertex_id: VertexId) -> bool: return self.tx_storage.transaction_exists(vertex_id) @inlineCallbacks - def find_best_common_block(self, peer_best_height: int, peer_best_block: bytes) -> Generator[Any, Any, None]: + def find_best_common_block(self, + my_best_block: _HeightInfo, + peer_best_block: _HeightInfo) -> Generator[Any, Any, Optional[_HeightInfo]]: """ Search for the highest block/height where we're synced. 
""" - assert self.tx_storage.indexes is not None - my_best_height = self.tx_storage.get_height_best_block() - - self.log.debug('find common chain', peer_height=peer_best_height, my_height=my_best_height) - - if peer_best_height <= my_best_height: - my_block = self.tx_storage.indexes.height.get(peer_best_height) - if my_block == peer_best_block: - # we have all the peer's blocks - if peer_best_height == my_best_height: - # We are in sync, ask for relay so the remote sends transactions in real time - self.update_synced(True) - self.send_relay() - else: - self.update_synced(False) - - self.log.debug('synced to the latest peer block', height=peer_best_height) - self.synced_height = peer_best_height - return - else: - # TODO peer is on a different best chain - self.log.warn('peer on different chain', peer_height=peer_best_height, - peer_block=peer_best_block.hex(), my_block=(my_block.hex() if my_block is not None else - None)) - - self.update_synced(False) - not_synced = min(peer_best_height, my_best_height) - synced = self.synced_height - - while not_synced - synced > 1: - self.log.debug('find_best_common_block synced not_synced', synced=synced, not_synced=not_synced) - step = math.ceil((not_synced - synced)/10) - heights = [] - height = synced - while height < not_synced: - heights.append(height) - height += step - heights.append(not_synced) - block_height_list = yield self.get_peer_block_hashes(heights) - block_height_list.reverse() - for height, block_hash in block_height_list: + self.log.debug('find_best_common_block', peer_best_block=peer_best_block, my_best_block=my_best_block) + + # Run an n-ary search in the interval [lo, hi). + # `lo` is always a height where we are synced. + # `hi` is always a height where sync state is unknown. 
+ hi = min(peer_best_block, my_best_block, key=lambda x: x.height) + lo = _HeightInfo(height=0, id=self._settings.GENESIS_BLOCK_HASH) + + while hi.height - lo.height > 1: + self.log.debug('find_best_common_block n-ary search query', lo=lo, hi=hi) + step = math.ceil((hi.height - lo.height) / 10) + heights = list(range(lo.height, hi.height, step)) + heights.append(hi.height) + + block_info_list = yield self.get_peer_block_hashes(heights) + block_info_list.sort(key=lambda x: x.height, reverse=True) + + # As we are supposed to be always synced at `lo`, we expect to receive a response + # with at least one item equals to lo. If it does not happen, we stop the search + # and return None. This might be caused when a reorg occurs during the search. + if not block_info_list: + self.log.info('n-ary search failed because it got a response with no lo_block_info', + lo=lo, + hi=hi) + return None + lo_block_info = block_info_list[-1] + if lo_block_info != lo: + self.log.info('n-ary search failed because lo != lo_block_info', + lo=lo, + hi=hi, + lo_block_info=lo_block_info) + return None + + for info in block_info_list: try: # We must check only fully validated transactions. - blk = self.tx_storage.get_transaction(block_hash) + blk = self.tx_storage.get_transaction(info.id) + except TransactionDoesNotExist: + hi = info + else: assert blk.get_metadata().validation.is_fully_connected() assert isinstance(blk, Block) - if height != blk.get_height(): - # WTF?! It should never happen. 
- self.state = PeerState.ERROR - return - synced = height + assert info.height == blk.get_height() + lo = info break - except TransactionDoesNotExist: - not_synced = height - self.log.debug('find_best_common_block finished synced not_synced', synced=synced, not_synced=not_synced) - self.synced_height = synced + self.log.debug('find_best_common_block n-ary search finished', lo=lo, hi=hi) + return lo - def get_peer_block_hashes(self, heights: list[int]) -> Deferred[list[tuple[int, bytes]]]: + @inlineCallbacks + def on_block_complete(self, blk: Block, vertex_list: list[BaseTransaction]) -> Generator[Any, Any, None]: + """This method is called when a block and its transactions are downloaded.""" + # Note: Any vertex and block could have already been added by another concurrent syncing peer. + for tx in vertex_list: + if not self.tx_storage.transaction_exists(not_none(tx.hash)): + self.manager.on_new_tx(tx, propagate_to_peers=False, fails_silently=False) + yield deferLater(self.reactor, 0, lambda: None) + + if not self.tx_storage.transaction_exists(not_none(blk.hash)): + self.manager.on_new_tx(blk, propagate_to_peers=False, fails_silently=False) + + def get_peer_block_hashes(self, heights: list[int]) -> Deferred[list[_HeightInfo]]: """ Returns the peer's block hashes in the given heights. """ if self._deferred_peer_block_hashes is not None: @@ -575,6 +608,7 @@ def handle_get_peer_block_hashes(self, payload: str) -> None: assert self.tx_storage.indexes is not None heights = json.loads(payload) if len(heights) > 20: + self.log.info('too many heights', heights_qty=len(heights)) self.protocol.send_error_and_close_connection('GET-PEER-BLOCK-HASHES: too many heights') return data = [] @@ -584,10 +618,7 @@ def handle_get_peer_block_hashes(self, payload: str) -> None: break blk = self.tx_storage.get_transaction(blk_hash) if blk.get_metadata().voided_by: - # The height index might have voided blocks when there is a draw. - # Let's try again soon. 
- self.reactor.callLater(3, self.handle_get_peer_block_hashes, payload) - return + break data.append((h, blk_hash.hex())) payload = json.dumps(data) self.send_message(ProtocolMessages.PEER_BLOCK_HASHES, payload) @@ -596,62 +627,68 @@ def handle_peer_block_hashes(self, payload: str) -> None: """ Handle a PEER-BLOCK-HASHES message. """ data = json.loads(payload) - data = [(h, bytes.fromhex(block_hash)) for (h, block_hash) in data] + data = [_HeightInfo(height=h, id=bytes.fromhex(block_hash)) for (h, block_hash) in data] deferred = self._deferred_peer_block_hashes self._deferred_peer_block_hashes = None if deferred: deferred.callback(data) - def send_get_next_blocks(self, start_hash: bytes, end_hash: bytes) -> None: + def send_get_next_blocks(self, start_hash: bytes, end_hash: bytes, quantity: int) -> None: """ Send a PEER-BLOCK-HASHES message. """ - payload = json.dumps(dict( - start_hash=start_hash.hex(), - end_hash=end_hash.hex(), - )) - self.send_message(ProtocolMessages.GET_NEXT_BLOCKS, payload) + payload = GetNextBlocksPayload( + start_hash=start_hash, + end_hash=end_hash, + quantity=quantity, + ) + self.send_message(ProtocolMessages.GET_NEXT_BLOCKS, payload.json()) self.receiving_stream = True def handle_get_next_blocks(self, payload: str) -> None: """ Handle a GET-NEXT-BLOCKS message. 
""" - self.log.debug('handle GET-NEXT-BLOCKS') + self.log.debug('handle GET-NEXT-BLOCKS', payload=payload) if self._is_streaming: self.protocol.send_error_and_close_connection('GET-NEXT-BLOCKS received before previous one finished') return - data = json.loads(payload) + data = GetNextBlocksPayload.parse_raw(payload) + start_block = self._validate_block(data.start_hash) + if start_block is None: + return + end_block = self._validate_block(data.end_hash) + if end_block is None: + return self.send_next_blocks( - start_hash=bytes.fromhex(data['start_hash']), - end_hash=bytes.fromhex(data['end_hash']), + start_block=start_block, + end_hash=data.end_hash, + quantity=data.quantity, ) - def send_next_blocks(self, start_hash: bytes, end_hash: bytes) -> None: + def _validate_block(self, _hash: VertexId) -> Optional[Block]: + """Validate block given in the GET-NEXT-BLOCKS and GET-TRANSACTIONS-BFS messages.""" + try: + blk = self.tx_storage.get_transaction(_hash) + except TransactionDoesNotExist: + self.log.debug('requested block not found', blk_id=_hash.hex()) + self.send_message(ProtocolMessages.NOT_FOUND, _hash.hex()) + return None + + if not isinstance(blk, Block): + self.log.debug('request block is not a block', blk_id=_hash.hex()) + self.send_message(ProtocolMessages.NOT_FOUND, _hash.hex()) + return None + + return blk + + def send_next_blocks(self, start_block: Block, end_hash: bytes, quantity: int) -> None: """ Send a NEXT-BLOCKS message. 
""" self.log.debug('start NEXT-BLOCKS stream') - try: - blk = self.tx_storage.get_transaction(start_hash) - except TransactionDoesNotExist: - # In case the tx does not exist we send a NOT-FOUND message - self.log.debug('requested start_hash not found', start_hash=start_hash.hex()) - self.send_message(ProtocolMessages.NOT_FOUND, start_hash.hex()) - return - assert isinstance(blk, Block) - assert blk.hash is not None - # XXX: it is not an error for the other peer to request a voided block, we'll pretend it doesn't exist, butf - blk_meta = blk.get_metadata() - if blk_meta.voided_by: - # In case the tx does not exist we send a NOT-FOUND message - self.log.debug('requested start_hash is voided, continue anyway', start_hash=start_hash.hex(), - voided_by=[i.hex() for i in blk_meta.voided_by]) - # XXX: we want to be able to not send this, but we do because the remote node could get stuck otherwise - # (tracked by issue #711) - # self.send_message(ProtocolMessages.NOT_FOUND, start_hash.hex()) - # return - if self.blockchain_streaming is not None and self.blockchain_streaming.is_running: - self.blockchain_streaming.stop() - self.blockchain_streaming = BlockchainStreaming(self, blk, end_hash, limit=self.DEFAULT_STREAMING_LIMIT) - self.blockchain_streaming.start() + if self._blk_streaming_server is not None and self._blk_streaming_server.is_running: + self.stop_blk_streaming_server(StreamEnd.PER_REQUEST) + limit = min(quantity, self.DEFAULT_STREAMING_LIMIT) + self._blk_streaming_server = BlockchainStreamingServer(self, start_block, end_hash, limit=limit) + self._blk_streaming_server.start() def send_blocks(self, blk: Block) -> None: """ Send a BLOCKS message. @@ -677,7 +714,7 @@ def handle_blocks_end(self, payload: str) -> None: This is important to know that the other peer will not send any BLOCKS messages anymore as a response to a previous command. 
""" - self.log.debug('recv BLOCKS-END', payload=payload, size=self._blk_size) + self.log.debug('recv BLOCKS-END', payload=payload) response_code = StreamEnd(int(payload)) self.receiving_stream = False @@ -688,6 +725,8 @@ def handle_blocks_end(self, payload: str) -> None: self.protocol.send_error_and_close_connection('Not expecting to receive BLOCKS-END message') return + assert self._blk_streaming_client is not None + self._blk_streaming_client.handle_blocks_end(response_code) self.log.debug('block streaming ended', reason=str(response_code)) def handle_blocks(self, payload: str) -> None: @@ -706,71 +745,10 @@ def handle_blocks(self, payload: str) -> None: # Not a block. Punish peer? return blk.storage = self.tx_storage - assert blk.hash is not None - self._blk_received += 1 - if self._blk_received > self._blk_max_quantity + 1: - self.log.warn('too many blocks received', last_block=blk.hash_hex) - # Too many blocks. Punish peer? - self.state = PeerState.ERROR - return - - if self.partial_vertex_exists(blk.hash): - # We reached a block we already have. Skip it. 
- self._blk_prev_hash = blk.hash - self._blk_repeated += 1 - if self.receiving_stream and self._blk_repeated > self.max_repeated_blocks: - self.log.debug('repeated block received', total_repeated=self._blk_repeated) - self.handle_many_repeated_blocks() - - # basic linearity validation, crucial for correctly predicting the next block's height - if self._blk_stream_reverse: - if self._last_received_block and blk.hash != self._last_received_block.get_block_parent_hash(): - self.handle_invalid_block('received block is not parent of previous block') - return - else: - if self._last_received_block and blk.get_block_parent_hash() != self._last_received_block.hash: - self.handle_invalid_block('received block is not child of previous block') - return - - try: - # this methods takes care of checking if the block already exists, - # it will take care of doing at least a basic validation - # self.log.debug('add new block', block=blk.hash_hex) - if self.partial_vertex_exists(blk.hash): - # XXX: early terminate? - self.log.debug('block early terminate?', blk_id=blk.hash.hex()) - else: - self.log.debug('block received', blk_id=blk.hash.hex()) - self.on_new_tx(blk, propagate_to_peers=False, quiet=True) - except HathorError: - self.handle_invalid_block(exc_info=True) - return - else: - self._last_received_block = blk - self._blk_repeated = 0 - # XXX: debugging log, maybe add timing info - if self._blk_received % 500 == 0: - self.log.debug('block streaming in progress', blocks_received=self._blk_received) - - def handle_invalid_block(self, msg: Optional[str] = None, *, exc_info: bool = False) -> None: - """ Call this method when receiving an invalid block. - """ - kwargs: dict[str, Any] = {} - if msg is not None: - kwargs['error'] = msg - if exc_info: - kwargs['exc_info'] = True - self.log.warn('invalid new block', **kwargs) - # Invalid block?! 
- self.state = PeerState.ERROR - - def handle_many_repeated_blocks(self) -> None: - """ Call this when a stream sends too many blocks in sequence that we already have. - """ - self.send_stop_block_streaming() - self.receiving_stream = False + assert self._blk_streaming_client is not None + self._blk_streaming_client.handle_blocks(blk) def send_stop_block_streaming(self) -> None: """ Send a STOP-BLOCK-STREAMING message. @@ -784,15 +762,33 @@ def handle_stop_block_streaming(self, payload: str) -> None: This means the remote peer wants to stop the current block stream. """ - if not self.blockchain_streaming or not self._is_streaming: + if not self._blk_streaming_server or not self._is_streaming: + self.log.debug('got stop streaming message with no streaming running') + return + + self.log.debug('got stop streaming message') + self.stop_blk_streaming_server(StreamEnd.PER_REQUEST) + + def send_stop_transactions_streaming(self) -> None: + """ Send a STOP-TRANSACTIONS-STREAMING message. + + This asks the other peer to stop a running block stream. + """ + self.send_message(ProtocolMessages.STOP_TRANSACTIONS_STREAMING) + + def handle_stop_transactions_streaming(self, payload: str) -> None: + """ Handle a STOP-TRANSACTIONS-STREAMING message. + + This means the remote peer wants to stop the current block stream. + """ + if not self._tx_streaming_server or not self._is_streaming: self.log.debug('got stop streaming message with no streaming running') return self.log.debug('got stop streaming message') - self.blockchain_streaming.stop() - self.blockchain_streaming = None + self.stop_tx_streaming_server(StreamEnd.PER_REQUEST) - def get_peer_best_block(self) -> Deferred[dict[str, Any]]: + def get_peer_best_block(self) -> Deferred[_HeightInfo]: """ Async call to get the remote peer's best block. 
""" if self._deferred_best_block is not None: @@ -807,35 +803,72 @@ def send_get_best_block(self) -> None: """ self.send_message(ProtocolMessages.GET_BEST_BLOCK) - def handle_get_best_block(self, payload: str) -> None: + def handle_get_best_block(self, _payload: str) -> None: """ Handle a GET-BEST-BLOCK message. """ best_block = self.tx_storage.get_best_block() meta = best_block.get_metadata() - data = {'block': best_block.hash_hex, 'height': meta.height} - self.send_message(ProtocolMessages.BEST_BLOCK, json.dumps(data)) + assert meta.validation.is_fully_connected() + payload = BestBlockPayload( + block=not_none(best_block.hash), + height=not_none(meta.height), + ) + self.send_message(ProtocolMessages.BEST_BLOCK, payload.json()) def handle_best_block(self, payload: str) -> None: """ Handle a BEST-BLOCK message. """ - data = json.loads(payload) - assert self.protocol.connections is not None - self.log.debug('got best block', **data) - data['block'] = bytes.fromhex(data['block']) + data = BestBlockPayload.parse_raw(payload) + best_block = _HeightInfo(height=data.height, id=data.block) deferred = self._deferred_best_block self._deferred_best_block = None if deferred: - deferred.callback(data) - - def _setup_tx_streaming(self): - """ Common setup before starting an outgoing transaction stream. - """ - self._tx_received = 0 - self._tx_max_quantity = DEFAULT_STREAMING_LIMIT # XXX: maybe this is redundant - # XXX: what else can we add for checking if everything is going well? 
- - def send_get_transactions_bfs(self, start_from: list[bytes], until_first_block: bytes) -> None: + deferred.callback(best_block) + + def start_transactions_streaming(self, partial_blocks: list[Block]) -> Deferred[StreamEnd]: + """Request peer to start streaming transactions to us.""" + self._tx_streaming_client = TransactionStreamingClient(self, + partial_blocks, + limit=self.DEFAULT_STREAMING_LIMIT) + + start_from: list[bytes] = [] + first_block_hash = not_none(partial_blocks[0].hash) + last_block_hash = not_none(partial_blocks[-1].hash) + self.log.info('requesting transactions streaming', + start_from=[x.hex() for x in start_from], + first_block=first_block_hash.hex(), + last_block=last_block_hash.hex()) + self.send_get_transactions_bfs(start_from, first_block_hash, last_block_hash) + return self._tx_streaming_client.wait() + + def resume_transactions_streaming(self) -> Deferred[StreamEnd]: + """Resume transaction streaming.""" + assert self._tx_streaming_client is not None + idx = self._tx_streaming_client._idx + partial_blocks = self._tx_streaming_client.partial_blocks[idx:] + assert partial_blocks + start_from = list(self._tx_streaming_client._waiting_for) + first_block_hash = not_none(partial_blocks[0].hash) + last_block_hash = not_none(partial_blocks[-1].hash) + self.log.info('requesting transactions streaming', + start_from=[x.hex() for x in start_from], + first_block=first_block_hash.hex(), + last_block=last_block_hash.hex()) + self.send_get_transactions_bfs(start_from, first_block_hash, last_block_hash) + return self._tx_streaming_client.resume() + + def stop_tx_streaming_server(self, response_code: StreamEnd) -> None: + """Stop transaction streaming server.""" + assert self._tx_streaming_server is not None + self._tx_streaming_server.stop() + self._tx_streaming_server = None + self.send_transactions_end(response_code) + + def send_get_transactions_bfs(self, + start_from: list[bytes], + first_block_hash: bytes, + last_block_hash: bytes) -> None: """ 
Send a GET-TRANSACTIONS-BFS message. This will request a BFS of all transactions starting from start_from list and walking back into parents/inputs. @@ -847,15 +880,19 @@ def send_get_transactions_bfs(self, start_from: list[bytes], until_first_block: height of until_first_block. The other peer will return an empty response if it doesn't have any of the transactions in start_from or if it doesn't have the until_first_block block. """ - self._setup_tx_streaming() start_from_hexlist = [tx.hex() for tx in start_from] - until_first_block_hex = until_first_block.hex() - self.log.debug('send_get_transactions_bfs', start_from=start_from_hexlist, last_block=until_first_block_hex) - payload = json.dumps(dict( - start_from=start_from_hexlist, - until_first_block=until_first_block_hex, - )) - self.send_message(ProtocolMessages.GET_TRANSACTIONS_BFS, payload) + first_block_hash_hex = first_block_hash.hex() + last_block_hash_hex = last_block_hash.hex() + self.log.debug('send_get_transactions_bfs', + start_from=start_from_hexlist, + first_block_hash=first_block_hash_hex, + last_block_hash=last_block_hash_hex) + payload = GetTransactionsBFSPayload( + start_from=start_from, + first_block_hash=first_block_hash, + last_block_hash=last_block_hash, + ) + self.send_message(ProtocolMessages.GET_TRANSACTIONS_BFS, payload.json()) self.receiving_stream = True def handle_get_transactions_bfs(self, payload: str) -> None: @@ -864,40 +901,58 @@ def handle_get_transactions_bfs(self, payload: str) -> None: if self._is_streaming: self.log.warn('ignore GET-TRANSACTIONS-BFS, already streaming') return - data = json.loads(payload) - # XXX: todo verify this limit while parsing the payload. 
- start_from = data['start_from'] - if len(start_from) > MAX_GET_TRANSACTIONS_BFS_LEN: + data = GetTransactionsBFSPayload.parse_raw(payload) + + if len(data.start_from) > MAX_GET_TRANSACTIONS_BFS_LEN: self.log.error('too many transactions in GET-TRANSACTIONS-BFS', state=self.state) self.protocol.send_error_and_close_connection('Too many transactions in GET-TRANSACTIONS-BFS') return - self.log.debug('handle_get_transactions_bfs', **data) - start_from = [bytes.fromhex(tx_hash_hex) for tx_hash_hex in start_from] - until_first_block = bytes.fromhex(data['until_first_block']) - self.send_transactions_bfs(start_from, until_first_block) - def send_transactions_bfs(self, start_from: list[bytes], until_first_block: bytes) -> None: - """ Start a transactions BFS stream. - """ + first_block = self._validate_block(data.first_block_hash) + if first_block is None: + return + + last_block = self._validate_block(data.last_block_hash) + if last_block is None: + return + start_from_txs = [] - for start_from_hash in start_from: + for start_from_hash in data.start_from: try: - start_from_txs.append(self.tx_storage.get_transaction(start_from_hash)) + tx = self.tx_storage.get_transaction(start_from_hash) except TransactionDoesNotExist: # In case the tx does not exist we send a NOT-FOUND message self.log.debug('requested start_from_hash not found', start_from_hash=start_from_hash.hex()) self.send_message(ProtocolMessages.NOT_FOUND, start_from_hash.hex()) return - if not self.tx_storage.transaction_exists(until_first_block): - # In case the tx does not exist we send a NOT-FOUND message - self.log.debug('requested until_first_block not found', until_first_block=until_first_block.hex()) - self.send_message(ProtocolMessages.NOT_FOUND, until_first_block.hex()) - return - if self.transactions_streaming is not None and self.transactions_streaming.is_running: - self.transactions_streaming.stop() - self.transactions_streaming = TransactionsStreaming(self, start_from_txs, until_first_block, - 
limit=self.DEFAULT_STREAMING_LIMIT) - self.transactions_streaming.start() + assert tx.hash is not None + assert first_block.hash is not None + meta = tx.get_metadata() + if meta.first_block != first_block.hash: + self.log.debug('requested start_from not confirmed by first_block', + vertex_id=tx.hash.hex(), + first_block=first_block.hash.hex(), + vertex_first_block=meta.first_block) + self.send_transactions_end(StreamEnd.INVALID_PARAMS) + return + start_from_txs.append(tx) + + self.send_transactions_bfs(start_from_txs, first_block, last_block) + + def send_transactions_bfs(self, + start_from: list[BaseTransaction], + first_block: Block, + last_block: Block) -> None: + """ Start a transactions BFS stream. + """ + if self._tx_streaming_server is not None and self._tx_streaming_server.is_running: + self.stop_tx_streaming_server(StreamEnd.PER_REQUEST) + self._tx_streaming_server = TransactionsStreamingServer(self, + start_from, + first_block, + last_block, + limit=self.DEFAULT_STREAMING_LIMIT) + self._tx_streaming_server.start() def send_transaction(self, tx: Transaction) -> None: """ Send a TRANSACTION message. @@ -916,7 +971,7 @@ def send_transactions_end(self, response_code: StreamEnd) -> None: def handle_transactions_end(self, payload: str) -> None: """ Handle a TRANSACTIONS-END message. 
""" - self.log.debug('recv TRANSACTIONS-END', payload=payload, size=self._blk_size) + self.log.debug('recv TRANSACTIONS-END', payload=payload) response_code = StreamEnd(int(payload)) self.receiving_stream = False @@ -927,6 +982,8 @@ def handle_transactions_end(self, payload: str) -> None: self.protocol.send_error_and_close_connection('Not expecting to receive TRANSACTIONS-END message') return + assert self._tx_streaming_client is not None + self._tx_streaming_client.handle_transactions_end(response_code) self.log.debug('transaction streaming ended', reason=str(response_code)) def handle_transaction(self, payload: str) -> None: @@ -942,34 +999,10 @@ def handle_transaction(self, payload: str) -> None: self.log.warn('not a transaction', hash=tx.hash_hex) # Not a transaction. Punish peer? return + tx.storage = self.tx_storage - self._tx_received += 1 - if self._tx_received > self._tx_max_quantity + 1: - self.log.warn('too many txs received') - self.state = PeerState.ERROR - return - - try: - # this methods takes care of checking if the tx already exists, it will take care of doing at least - # a basic validation - # self.log.debug('add new tx', tx=tx.hash_hex) - if self.partial_vertex_exists(tx.hash): - # XXX: early terminate? - self.log.debug('tx early terminate?', tx_id=tx.hash.hex()) - else: - self.log.debug('tx received', tx_id=tx.hash.hex()) - self.on_new_tx(tx, propagate_to_peers=False, quiet=True, reject_locked_reward=True) - except HathorError: - self.log.warn('invalid new tx', exc_info=True) - # Invalid block?! - # Invalid transaction?! - # Maybe stop syncing and punish peer. 
- self.state = PeerState.ERROR - return - else: - # XXX: debugging log, maybe add timing info - if self._tx_received % 100 == 0: - self.log.debug('tx streaming in progress', txs_received=self._tx_received) + assert self._tx_streaming_client is not None + self._tx_streaming_client.handle_transaction(tx) @inlineCallbacks def get_tx(self, tx_id: bytes) -> Generator[Deferred, Any, BaseTransaction]: @@ -1104,76 +1137,8 @@ def handle_data(self, payload: str) -> None: # If we have not requested the data, it is a new transaction being propagated # in the network, thus, we propagate it as well. if tx.can_validate_full(): - self.log.info('tx received in real time from peer', tx=tx.hash_hex, peer=self.protocol.get_peer_id()) - self.on_new_tx(tx, propagate_to_peers=True) + self.log.debug('tx received in real time from peer', tx=tx.hash_hex, peer=self.protocol.get_peer_id()) + self.manager.on_new_tx(tx, propagate_to_peers=True) else: - self.log.info('skipping tx received in real time from peer', - tx=tx.hash_hex, peer=self.protocol.get_peer_id()) - - def on_new_tx(self, tx: BaseTransaction, *, quiet: bool = False, propagate_to_peers: bool = True, - sync_checkpoints: bool = False, reject_locked_reward: bool = True) -> bool: - """ This method handle everything related to adding potentially partially validated transactions. - - Call this instead of HathorManager.on_new_tx, unless `tx` must be fully validated (for example when receiving - realtime DATA pushes). 
- """ - - assert self.tx_storage.indexes is not None - assert tx.hash is not None - - # XXX: "refresh" the transaction so there isn't a duplicate in memory - if self.partial_vertex_exists(tx.hash): - with self.tx_storage.allow_partially_validated_context(): - self.tx_storage.compare_bytes_with_local_tx(tx) - tx = self.tx_storage.get_transaction(tx.hash) - assert tx.hash is not None - - tx.storage = self.tx_storage - - with self.tx_storage.allow_partially_validated_context(): - metadata = tx.get_metadata() - - if metadata.validation.is_fully_connected() or tx.can_validate_full(): - if not self.manager.on_new_tx(tx): - return False - elif sync_checkpoints: - assert self.tx_storage.indexes.deps is not None - with self.tx_storage.allow_partially_validated_context(): - metadata.children = self.tx_storage.indexes.deps.known_children(tx) - try: - tx.validate_checkpoint(self.manager.checkpoints) - except HathorError: - self.log.warn('on_new_tx(): checkpoint validation failed', tx=tx.hash_hex, exc_info=True) - return False - self.tx_storage.save_transaction(tx) - self.tx_storage.indexes.deps.add_tx(tx) - self.manager.log_new_object(tx, 'new {} partially accepted while syncing checkpoints', quiet=quiet) - else: - assert self.tx_storage.indexes.deps is not None - with self.tx_storage.allow_partially_validated_context(): - if isinstance(tx, Block) and not tx.has_basic_block_parent(): - self.log.warn('on_new_tx(): block parent needs to be at least basic-valid', tx=tx.hash_hex) - return False - if not self.manager.verification_service.validate_basic(tx): - self.log.warn('on_new_tx(): basic validation failed', tx=tx.hash_hex) - return False - - # The method below adds the tx as a child of the parents - # This needs to be called right before the save because we were adding the children - # in the tx parents even if the tx was invalid (failing the verifications above) - # then I would have a children that was not in the storage - self.tx_storage.save_transaction(tx) - 
self.tx_storage.indexes.deps.add_tx(tx) - self.manager.log_new_object(tx, 'new {} partially accepted', quiet=quiet) - - if self.tx_storage.indexes.deps is not None: - self.tx_storage.indexes.deps.remove_from_needed_index(tx.hash) - - if self.tx_storage.indexes.deps is not None: - try: - self.manager.sync_v2_step_validations([tx], quiet=quiet) - except (AssertionError, HathorError): - self.log.warn('on_new_tx(): step validations failed', tx=tx.hash_hex, exc_info=True) - return False - - return True + self.log.debug('skipping tx received in real time from peer', + tx=tx.hash_hex, peer=self.protocol.get_peer_id()) diff --git a/hathor/p2p/sync_v2/blockchain_streaming_client.py b/hathor/p2p/sync_v2/blockchain_streaming_client.py new file mode 100644 index 000000000..3635396b9 --- /dev/null +++ b/hathor/p2p/sync_v2/blockchain_streaming_client.py @@ -0,0 +1,153 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import TYPE_CHECKING, Optional + +from structlog import get_logger +from twisted.internet.defer import Deferred + +from hathor.p2p.sync_v2.exception import ( + BlockNotConnectedToPreviousBlock, + InvalidVertexError, + StreamingError, + TooManyRepeatedVerticesError, + TooManyVerticesReceivedError, +) +from hathor.p2p.sync_v2.streamers import StreamEnd +from hathor.transaction import Block +from hathor.transaction.exceptions import HathorError +from hathor.types import VertexId + +if TYPE_CHECKING: + from hathor.p2p.sync_v2.agent import NodeBlockSync, _HeightInfo + +logger = get_logger() + + +class BlockchainStreamingClient: + def __init__(self, sync_agent: 'NodeBlockSync', start_block: '_HeightInfo', end_block: '_HeightInfo') -> None: + self.sync_agent = sync_agent + self.protocol = self.sync_agent.protocol + self.tx_storage = self.sync_agent.tx_storage + self.manager = self.sync_agent.manager + + self.log = logger.new(peer=self.protocol.get_short_peer_id()) + + self.start_block = start_block + self.end_block = end_block + + # When syncing blocks we start streaming with all peers + # so the moment I get some repeated blocks, I stop the download + # because it's probably a streaming that I've already received + self.max_repeated_blocks = 10 + + self._deferred: Deferred[StreamEnd] = Deferred() + + self._blk_received: int = 0 + self._blk_repeated: int = 0 + + self._blk_max_quantity = self.end_block.height - self.start_block.height + 1 + self._reverse: bool = False + if self._blk_max_quantity < 0: + self._blk_max_quantity = -self._blk_max_quantity + self._reverse = True + + self._last_received_block: Optional[Block] = None + + self._partial_blocks: list[Block] = [] + + def wait(self) -> Deferred[StreamEnd]: + """Return the deferred.""" + return self._deferred + + def fails(self, reason: 'StreamingError') -> None: + """Fail the execution by resolving the deferred with an error.""" + self._deferred.errback(reason) + + def partial_vertex_exists(self, 
vertex_id: VertexId) -> bool: + """Return true if the vertex exists no matter its validation state.""" + with self.tx_storage.allow_partially_validated_context(): + return self.tx_storage.transaction_exists(vertex_id) + + def handle_blocks(self, blk: Block) -> None: + """This method is called by the sync agent when a BLOCKS message is received.""" + if self._deferred.called: + return + + self._blk_received += 1 + if self._blk_received > self._blk_max_quantity: + self.log.warn('too many blocks received', + blk_received=self._blk_received, + blk_max_quantity=self._blk_max_quantity) + self.fails(TooManyVerticesReceivedError()) + return + + # TODO Run basic verification. We will uncomment these lines after we finish + # refactoring our verification services. + # + # if not blk.is_genesis: + # try: + # self.manager.verification_service.validate_basic(blk) + # except TxValidationError as e: + # self.fails(InvalidVertexError(repr(e))) + # return + + # Check for repeated blocks. + assert blk.hash is not None + is_duplicated = False + if self.partial_vertex_exists(blk.hash): + # We reached a block we already have. Skip it. 
+ self._blk_repeated += 1 + is_duplicated = True + if self._blk_repeated > self.max_repeated_blocks: + self.log.info('too many repeated block received', total_repeated=self._blk_repeated) + self.fails(TooManyRepeatedVerticesError()) + self._last_received_block = blk + return + + # basic linearity validation, crucial for correctly predicting the next block's height + if self._reverse: + if self._last_received_block and blk.hash != self._last_received_block.get_block_parent_hash(): + self.fails(BlockNotConnectedToPreviousBlock()) + return + else: + if self._last_received_block and blk.get_block_parent_hash() != self._last_received_block.hash: + self.fails(BlockNotConnectedToPreviousBlock()) + return + + if is_duplicated: + self.log.debug('block early terminate?', blk_id=blk.hash.hex()) + else: + self.log.debug('block received', blk_id=blk.hash.hex()) + + if blk.can_validate_full(): + try: + self.manager.on_new_tx(blk, propagate_to_peers=False, fails_silently=False) + except HathorError: + self.fails(InvalidVertexError(blk.hash.hex())) + return + else: + self._partial_blocks.append(blk) + + self._last_received_block = blk + self._blk_repeated = 0 + # XXX: debugging log, maybe add timing info + if self._blk_received % 500 == 0: + self.log.debug('block streaming in progress', blocks_received=self._blk_received) + + def handle_blocks_end(self, response_code: StreamEnd) -> None: + """This method is called by the sync agent when a BLOCKS-END message is received.""" + if self._deferred.called: + return + self._deferred.callback(response_code) diff --git a/hathor/p2p/sync_v2/exception.py b/hathor/p2p/sync_v2/exception.py new file mode 100644 index 000000000..3a62731ec --- /dev/null +++ b/hathor/p2p/sync_v2/exception.py @@ -0,0 +1,42 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class StreamingError(Exception): + """Base error for sync-v2 streaming.""" + pass + + +class TooManyVerticesReceivedError(StreamingError): + """Raised when the other peer has sent too many vertices.""" + pass + + +class TooManyRepeatedVerticesError(StreamingError): + """Raised when the other peer has sent too many repeated vertices.""" + pass + + +class BlockNotConnectedToPreviousBlock(StreamingError): + """Raised when the received block is not connected to the previous one.""" + pass + + +class InvalidVertexError(StreamingError): + """Raised when the received vertex fails validation.""" + pass + + +class UnexpectedVertex(StreamingError): + """Raised when we are not expecting the received vertex.""" + pass diff --git a/hathor/p2p/sync_v2/factory.py b/hathor/p2p/sync_v2/factory.py index defb37283..71f17dd87 100644 --- a/hathor/p2p/sync_v2/factory.py +++ b/hathor/p2p/sync_v2/factory.py @@ -12,13 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from hathor.p2p.manager import ConnectionsManager from hathor.p2p.sync_agent import SyncAgent from hathor.p2p.sync_factory import SyncAgentFactory from hathor.p2p.sync_v2.agent import NodeBlockSync -from hathor.util import Reactor +from hathor.reactor import ReactorProtocol as Reactor if TYPE_CHECKING: from hathor.p2p.protocol import HathorProtocol @@ -28,5 +28,5 @@ class SyncV2Factory(SyncAgentFactory): def __init__(self, connections: ConnectionsManager): self.connections = connections - def create_sync_agent(self, protocol: 'HathorProtocol', reactor: Optional[Reactor] = None) -> SyncAgent: + def create_sync_agent(self, protocol: 'HathorProtocol', reactor: Reactor) -> SyncAgent: return NodeBlockSync(protocol, reactor=reactor) diff --git a/hathor/p2p/sync_v2/mempool.py b/hathor/p2p/sync_v2/mempool.py index 97020eff1..e27796fc5 100644 --- a/hathor/p2p/sync_v2/mempool.py +++ b/hathor/p2p/sync_v2/mempool.py @@ -39,6 +39,8 @@ def __init__(self, sync_agent: 'NodeBlockSync'): self.tx_storage = self.manager.tx_storage self.reactor = self.sync_agent.reactor + self._deferred: Optional[Deferred[None]] = None + # Set of tips we know but couldn't add to the DAG yet. self.missing_tips: set[bytes] = set() @@ -52,14 +54,21 @@ def is_running(self) -> bool: """Whether the sync-mempool is currently running.""" return self._is_running - def run(self) -> None: + def run(self) -> Deferred[None]: """Starts _run in, won't start again if already running.""" if self.is_running(): self.log.warn('already started') - return + assert self._deferred is not None + return self._deferred self._is_running = True self.reactor.callLater(0, self._run) + # TODO Implement a stop() and call it after N minutes. 
+ + assert self._deferred is None + self._deferred = Deferred() + return self._deferred + @inlineCallbacks def _run(self) -> Generator[Deferred, Any, None]: try: @@ -67,6 +76,9 @@ def _run(self) -> Generator[Deferred, Any, None]: finally: # sync_agent.run_sync will start it again when needed self._is_running = False + assert self._deferred is not None + self._deferred.callback(None) + self._deferred = None @inlineCallbacks def _unsafe_run(self) -> Generator[Deferred, Any, None]: diff --git a/hathor/p2p/sync_v2/payloads.py b/hathor/p2p/sync_v2/payloads.py new file mode 100644 index 000000000..002b2d67f --- /dev/null +++ b/hathor/p2p/sync_v2/payloads.py @@ -0,0 +1,73 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from pydantic import validator + +from hathor.types import VertexId +from hathor.utils.pydantic import BaseModel + + +class PayloadBaseModel(BaseModel): + + @classmethod + def convert_hex_to_bytes(cls, value: str | VertexId) -> VertexId: + """Convert a string in hex format to bytes. 
If bytes are given, it does nothing.""" + if isinstance(value, str): + return bytes.fromhex(value) + elif isinstance(value, VertexId): + return value + raise ValueError('invalid type') + + class Config: + json_encoders = { + VertexId: lambda x: x.hex() + } + + +class GetNextBlocksPayload(PayloadBaseModel): + """GET-NEXT-BLOCKS message is used to request a stream of blocks in the best blockchain.""" + + start_hash: VertexId + end_hash: VertexId + quantity: int + + @validator('start_hash', 'end_hash', pre=True) + def validate_bytes_fields(cls, value: str | bytes) -> VertexId: + return cls.convert_hex_to_bytes(value) + + +class BestBlockPayload(PayloadBaseModel): + """BEST-BLOCK message is used to send information about the current best block.""" + + block: VertexId + height: int + + @validator('block', pre=True) + def validate_bytes_fields(cls, value: str | VertexId) -> VertexId: + return cls.convert_hex_to_bytes(value) + + +class GetTransactionsBFSPayload(PayloadBaseModel): + """GET-TRANSACTIONS-BFS message is used to request a stream of transactions confirmed by blocks.""" + start_from: list[VertexId] + first_block_hash: VertexId + last_block_hash: VertexId + + @validator('first_block_hash', 'last_block_hash', pre=True) + def validate_bytes_fields(cls, value: str | VertexId) -> VertexId: + return cls.convert_hex_to_bytes(value) + + @validator('start_from', pre=True, each_item=True) + def validate_start_from(cls, value: str | VertexId) -> VertexId: + return cls.convert_hex_to_bytes(value) diff --git a/hathor/p2p/sync_v2/streamers.py b/hathor/p2p/sync_v2/streamers.py index 1c8fac80e..22dbd8360 100644 --- a/hathor/p2p/sync_v2/streamers.py +++ b/hathor/p2p/sync_v2/streamers.py @@ -13,7 +13,7 @@ # limitations under the License. 
from enum import IntFlag -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING, Iterable, Iterator, Optional, Union from structlog import get_logger from twisted.internet.interfaces import IConsumer, IDelayedCall, IPushProducer @@ -21,6 +21,7 @@ from hathor.transaction import BaseTransaction, Block, Transaction from hathor.transaction.storage.traversal import BFSOrderWalk +from hathor.util import not_none from hathor.utils.zope import asserted_cast if TYPE_CHECKING: @@ -38,6 +39,9 @@ class StreamEnd(IntFlag): LIMIT_EXCEEDED = 2 STREAM_BECAME_VOIDED = 3 # this will happen when the current chain becomes voided while it is being sent TX_NOT_CONFIRMED = 4 + INVALID_PARAMS = 5 + INTERNAL_ERROR = 6 + PER_REQUEST = 7 def __str__(self): if self is StreamEnd.END_HASH_REACHED: @@ -50,15 +54,23 @@ def __str__(self): return 'streamed block chain became voided' elif self is StreamEnd.TX_NOT_CONFIRMED: return 'streamed reached a tx that is not confirmed' + elif self is StreamEnd.INVALID_PARAMS: + return 'streamed with invalid parameters' + elif self is StreamEnd.INTERNAL_ERROR: + return 'internal error' + elif self is StreamEnd.PER_REQUEST: + return 'stopped per request' else: raise ValueError(f'invalid StreamEnd value: {self.value}') @implementer(IPushProducer) -class _StreamingBase: - def __init__(self, node_sync: 'NodeBlockSync', *, limit: int = DEFAULT_STREAMING_LIMIT): - self.node_sync = node_sync - self.protocol: 'HathorProtocol' = node_sync.protocol +class _StreamingServerBase: + def __init__(self, sync_agent: 'NodeBlockSync', *, limit: int = DEFAULT_STREAMING_LIMIT): + self.sync_agent = sync_agent + self.tx_storage = self.sync_agent.tx_storage + self.protocol: 'HathorProtocol' = sync_agent.protocol + assert self.protocol.transport is not None consumer = asserted_cast(IConsumer, self.protocol.transport) self.consumer = consumer @@ -70,7 +82,7 @@ def __init__(self, node_sync: 'NodeBlockSync', *, limit: int = DEFAULT_STREAMING self.is_producing: bool 
= False self.delayed_call: Optional[IDelayedCall] = None - self.log = logger.new(peer=node_sync.protocol.get_short_peer_id()) + self.log = logger.new(peer=sync_agent.protocol.get_short_peer_id()) def schedule_if_needed(self) -> None: """Schedule `send_next` if needed.""" @@ -83,13 +95,27 @@ def schedule_if_needed(self) -> None: if self.delayed_call and self.delayed_call.active(): return - self.delayed_call = self.node_sync.reactor.callLater(0, self.send_next) + self.delayed_call = self.sync_agent.reactor.callLater(0, self.safe_send_next) + + def safe_send_next(self) -> None: + """Call send_next() and schedule next call.""" + try: + self.send_next() + except Exception: + self._stop_streaming_server(StreamEnd.INTERNAL_ERROR) + raise + else: + self.schedule_if_needed() + + def _stop_streaming_server(self, response_code: StreamEnd) -> None: + """Stop streaming server.""" + raise NotImplementedError def start(self) -> None: """Start pushing.""" self.log.debug('start streaming') - assert not self.node_sync._is_streaming - self.node_sync._is_streaming = True + assert not self.sync_agent._is_streaming + self.sync_agent._is_streaming = True self.is_running = True self.consumer.registerProducer(self, True) self.resumeProducing() @@ -97,11 +123,11 @@ def start(self) -> None: def stop(self) -> None: """Stop pushing.""" self.log.debug('stop streaming') - assert self.node_sync._is_streaming + assert self.sync_agent._is_streaming self.is_running = False self.pauseProducing() self.consumer.unregisterProducer() - self.node_sync._is_streaming = False + self.sync_agent._is_streaming = False def send_next(self) -> None: """Push next block to peer.""" @@ -123,16 +149,19 @@ def stopProducing(self) -> None: self.pauseProducing() -class BlockchainStreaming(_StreamingBase): - def __init__(self, node_sync: 'NodeBlockSync', start_block: Block, end_hash: bytes, +class BlockchainStreamingServer(_StreamingServerBase): + def __init__(self, sync_agent: 'NodeBlockSync', start_block: Block, 
end_hash: bytes, *, limit: int = DEFAULT_STREAMING_LIMIT, reverse: bool = False): - super().__init__(node_sync, limit=limit) + super().__init__(sync_agent, limit=limit) self.start_block = start_block self.current_block: Optional[Block] = start_block self.end_hash = end_hash self.reverse = reverse + def _stop_streaming_server(self, response_code: StreamEnd) -> None: + self.sync_agent.stop_blk_streaming_server(response_code) + def send_next(self) -> None: """Push next block to peer.""" assert self.is_running @@ -145,32 +174,21 @@ def send_next(self) -> None: meta = cur.get_metadata() if meta.voided_by: - self.stop() - self.node_sync.send_blocks_end(StreamEnd.STREAM_BECAME_VOIDED) + self.sync_agent.stop_blk_streaming_server(StreamEnd.STREAM_BECAME_VOIDED) return if cur.hash == self.end_hash: # only send the last when not reverse if not self.reverse: - self.log.debug('send next block', blk_id=cur.hash.hex()) - self.node_sync.send_blocks(cur) - self.stop() - self.node_sync.send_blocks_end(StreamEnd.END_HASH_REACHED) - return - - if self.counter >= self.limit: - # only send the last when not reverse - if not self.reverse: - self.log.debug('send next block', blk_id=cur.hash.hex()) - self.node_sync.send_blocks(cur) - self.stop() - self.node_sync.send_blocks_end(StreamEnd.LIMIT_EXCEEDED) + self.log.debug('send next block', height=cur.get_height(), blk_id=cur.hash.hex()) + self.sync_agent.send_blocks(cur) + self.sync_agent.stop_blk_streaming_server(StreamEnd.END_HASH_REACHED) return self.counter += 1 - self.log.debug('send next block', blk_id=cur.hash.hex()) - self.node_sync.send_blocks(cur) + self.log.debug('send next block', height=cur.get_height(), blk_id=cur.hash.hex()) + self.sync_agent.send_blocks(cur) if self.reverse: self.current_block = cur.get_block_parent() @@ -179,39 +197,77 @@ def send_next(self) -> None: # XXX: don't send the genesis or the current block if self.current_block is None or self.current_block.is_genesis: - self.stop() - 
self.node_sync.send_blocks_end(StreamEnd.NO_MORE_BLOCKS) + self.sync_agent.stop_blk_streaming_server(StreamEnd.NO_MORE_BLOCKS) return - self.schedule_if_needed() + if self.counter >= self.limit: + self.sync_agent.stop_blk_streaming_server(StreamEnd.LIMIT_EXCEEDED) + return -class TransactionsStreaming(_StreamingBase): +class TransactionsStreamingServer(_StreamingServerBase): """Streams all transactions confirmed by the given block, from right to left (decreasing timestamp). + + If the start_from parameter is not empty, the BFS (Breadth-First Search) for the first block will commence + using the provided hashes. This mechanism enables streaming requests to continue from a specific point + should there be interruptions or issues. """ - def __init__(self, node_sync: 'NodeBlockSync', start_from: list[BaseTransaction], last_block_hash: bytes, - *, limit: int = DEFAULT_STREAMING_LIMIT): + def __init__(self, + sync_agent: 'NodeBlockSync', + start_from: list[BaseTransaction], + first_block: Block, + last_block: Block, + *, + limit: int = DEFAULT_STREAMING_LIMIT) -> None: # XXX: is limit needed for tx streaming? Or let's always send all txs for # a block? Very unlikely we'll reach this limit - super().__init__(node_sync, limit=limit) - - assert len(start_from) > 0 - assert start_from[0].storage is not None - self.storage = start_from[0].storage - self.last_block_hash = last_block_hash - self.last_block_height = 0 - - self.bfs = BFSOrderWalk(self.storage, is_dag_verifications=True, is_dag_funds=True, is_left_to_right=False) - self.iter = self.bfs.run(start_from, skip_root=False) + super().__init__(sync_agent, limit=limit) + + self.first_block: Block = first_block + self.last_block: Block = last_block + self.start_from = start_from + + # Validate that all transactions in `start_from` are confirmed by the first block. 
+ for tx in start_from: + assert tx.get_metadata().first_block == self.first_block.hash + + self.current_block: Optional[Block] = self.first_block + self.bfs = BFSOrderWalk(self.tx_storage, is_dag_verifications=True, is_dag_funds=True, is_left_to_right=False) + self.iter = self.get_iter() + + def _stop_streaming_server(self, response_code: StreamEnd) -> None: + self.sync_agent.stop_tx_streaming_server(response_code) + + def get_iter(self) -> Iterator[BaseTransaction]: + """Return an iterator that yields all transactions confirmed by each block in sequence.""" + root: Union[BaseTransaction, Iterable[BaseTransaction]] + skip_root: bool + while self.current_block: + if not self.start_from: + root = self.current_block + skip_root = True + else: + root = self.start_from + skip_root = False + self.log.debug('iterating over transactions from block', + block=not_none(self.current_block.hash).hex(), + height=self.current_block.get_height(), + start_from=self.start_from, + skip_root=skip_root) + it = self.bfs.run(root, skip_root=skip_root) + yield from it + if self.current_block == self.last_block: + break + + # Check if this block is still in the best blockchain. + if self.current_block.get_metadata().voided_by: + self.sync_agent.stop_tx_streaming_server(StreamEnd.STREAM_BECAME_VOIDED) + return - def start(self) -> None: - super().start() - last_blk = self.storage.get_transaction(self.last_block_hash) - assert isinstance(last_blk, Block) - self.last_block_height = last_blk.get_height() + self.current_block = self.current_block.get_next_block_best_chain() + self.start_from.clear() - # TODO: make this generic too? 
def send_next(self) -> None: """Push next transaction to peer.""" assert self.is_running @@ -221,14 +277,13 @@ def send_next(self) -> None: cur = next(self.iter) except StopIteration: # nothing more to send - self.stop() - self.node_sync.send_transactions_end(StreamEnd.END_HASH_REACHED) + self.log.debug('no more transactions, stopping streaming') + self.sync_agent.stop_tx_streaming_server(StreamEnd.END_HASH_REACHED) return + # Skip blocks. if cur.is_block: - if cur.hash == self.last_block_hash: - self.bfs.skip_neighbors(cur) - self.schedule_if_needed() + self.bfs.skip_neighbors(cur) return assert isinstance(cur, Transaction) @@ -236,32 +291,24 @@ def send_next(self) -> None: cur_metadata = cur.get_metadata() if cur_metadata.first_block is None: - self.log.debug('reached a tx that is not confirming, continuing anyway') - # XXX: related to issue #711 - # self.stop() - # self.node_sync.send_transactions_end(StreamEnd.TX_NOT_CONFIRMED) - # return - else: - assert cur_metadata.first_block is not None - first_blk_meta = self.storage.get_metadata(cur_metadata.first_block) - assert first_blk_meta is not None - confirmed_by_height = first_blk_meta.height - assert confirmed_by_height is not None - if confirmed_by_height <= self.last_block_height: - # got to a tx that is confirmed by the given last-block or an older block - self.log.debug('tx confirmed by block older than last_block', tx=cur.hash_hex, - confirmed_by_height=confirmed_by_height, last_block_height=self.last_block_height) - self.bfs.skip_neighbors(cur) - self.schedule_if_needed() - return + self.log.debug('reached a tx that is not confirmed, stopping streaming') + self.sync_agent.stop_tx_streaming_server(StreamEnd.TX_NOT_CONFIRMED) + return + + # Check if tx is confirmed by the `self.current_block` or any next block. 
+ assert cur_metadata.first_block is not None + assert self.current_block is not None + first_block = self.tx_storage.get_transaction(cur_metadata.first_block) + if not_none(first_block.get_metadata().height) < not_none(self.current_block.get_metadata().height): + self.log.debug('skipping tx: out of current block') + self.bfs.skip_neighbors(cur) + return self.log.debug('send next transaction', tx_id=cur.hash.hex()) - self.node_sync.send_transaction(cur) + self.sync_agent.send_transaction(cur) self.counter += 1 if self.counter >= self.limit: - self.stop() - self.node_sync.send_transactions_end(StreamEnd.LIMIT_EXCEEDED) + self.log.debug('limit exceeded, stopping streaming') + self.sync_agent.stop_tx_streaming_server(StreamEnd.LIMIT_EXCEEDED) return - - self.schedule_if_needed() diff --git a/hathor/p2p/sync_v2/transaction_streaming_client.py b/hathor/p2p/sync_v2/transaction_streaming_client.py new file mode 100644 index 000000000..b46ea546b --- /dev/null +++ b/hathor/p2p/sync_v2/transaction_streaming_client.py @@ -0,0 +1,252 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from collections import deque +from typing import TYPE_CHECKING, Any, Generator, Optional + +from structlog import get_logger +from twisted.internet.defer import Deferred, inlineCallbacks + +from hathor.p2p.sync_v2.exception import ( + InvalidVertexError, + StreamingError, + TooManyVerticesReceivedError, + UnexpectedVertex, +) +from hathor.p2p.sync_v2.streamers import StreamEnd +from hathor.transaction import BaseTransaction +from hathor.transaction.exceptions import HathorError, TxValidationError +from hathor.types import VertexId +from hathor.util import not_none + +if TYPE_CHECKING: + from hathor.p2p.sync_v2.agent import NodeBlockSync + from hathor.transaction import Block + +logger = get_logger() + + +class TransactionStreamingClient: + def __init__(self, + sync_agent: 'NodeBlockSync', + partial_blocks: list['Block'], + *, + limit: int) -> None: + self.sync_agent = sync_agent + self.protocol = self.sync_agent.protocol + self.tx_storage = self.sync_agent.tx_storage + self.manager = self.sync_agent.manager + self.reactor = self.manager.reactor + + self.log = logger.new(peer=self.protocol.get_short_peer_id()) + + # List of blocks from which we will receive transactions. + self.partial_blocks = partial_blocks + + # True if we are processing a transaction. + self._is_processing: bool = False + + # Deferred return to the sync agent. + self._deferred: Deferred[StreamEnd] = Deferred() + + # Number of transactions received. + self._tx_received: int = 0 + + # Maximum number of transactions to be received. + self._tx_max_quantity = limit + + # Queue of transactions waiting to be processed. + self._queue: deque[BaseTransaction] = deque() + + # Keeps the response code if the streaming has ended. + self._response_code: Optional[StreamEnd] = None + + # Index to the current block. + self._idx: int = 0 + + # Set of hashes we are waiting to receive. 
+ self._waiting_for: set[VertexId] = set() + + # In-memory database of transactions already received but still + # waiting for dependencies. + self._db: dict[VertexId, BaseTransaction] = {} + self._existing_deps: set[VertexId] = set() + + self._prepare_block(self.partial_blocks[0]) + + def wait(self) -> Deferred[StreamEnd]: + """Return the deferred.""" + return self._deferred + + def resume(self) -> Deferred[StreamEnd]: + """Resume receiving vertices.""" + assert self._deferred.called + self._tx_received = 0 + self._response_code = None + self._deferred = Deferred() + return self._deferred + + def fails(self, reason: 'StreamingError') -> None: + """Fail the execution by resolving the deferred with an error.""" + if self._deferred.called: + self.log.warn('already failed before', new_reason=repr(reason)) + return + self._deferred.errback(reason) + + def handle_transaction(self, tx: BaseTransaction) -> None: + """This method is called by the sync agent when a TRANSACTION message is received.""" + if self._deferred.called: + return + + self._tx_received += 1 + if self._tx_received > self._tx_max_quantity: + self.log.warn('too many transactions received', + tx_received=self._tx_received, + tx_max_quantity=self._tx_max_quantity) + self.fails(TooManyVerticesReceivedError()) + return + + assert tx.hash is not None + self.log.debug('tx received', tx_id=tx.hash.hex()) + self._queue.append(tx) + assert len(self._queue) <= self._tx_max_quantity + + if not self._is_processing: + self.reactor.callLater(0, self.process_queue) + + @inlineCallbacks + def process_queue(self) -> Generator[Any, Any, None]: + """Process next transaction in the queue.""" + if self._deferred.called: + return + + if self._is_processing: + return + + if not self._queue: + self.check_end() + return + + self._is_processing = True + try: + tx = self._queue.popleft() + self.log.debug('processing tx', tx_id=not_none(tx.hash).hex()) + yield self._process_transaction(tx) + finally: + self._is_processing = False + 
+ self.reactor.callLater(0, self.process_queue) + + @inlineCallbacks + def _process_transaction(self, tx: BaseTransaction) -> Generator[Any, Any, None]: + """Process transaction.""" + assert tx.hash is not None + + # Run basic verification. + if not tx.is_genesis: + try: + self.manager.verification_service.verify_basic(tx) + except TxValidationError as e: + self.fails(InvalidVertexError(repr(e))) + return + + # Any repeated transaction will fail this check because they will + # not belong to the waiting list. + if tx.hash not in self._waiting_for: + if tx.hash in self._db: + # This case might happen during a resume, so we just log and keep syncing. + self.log.debug('duplicated vertex received', tx_id=tx.hash.hex()) + self._update_dependencies(tx) + elif tx.hash in self._existing_deps: + # This case might happen if we already have the transaction from another sync. + self.log.debug('existing vertex received', tx_id=tx.hash.hex()) + self._update_dependencies(tx) + else: + self.log.info('unexpected vertex received', tx_id=tx.hash.hex()) + self.fails(UnexpectedVertex(tx.hash.hex())) + return + self._waiting_for.remove(tx.hash) + + self._update_dependencies(tx) + + self._db[tx.hash] = tx + + if not self._waiting_for: + self.log.debug('no pending dependencies, processing buffer') + while not self._waiting_for: + result = yield self._execute_and_prepare_next() + if not result: + break + else: + self.log.debug('pending dependencies', counter=len(self._waiting_for)) + + if self._tx_received % 100 == 0: + self.log.debug('tx streaming in progress', txs_received=self._tx_received) + + def _update_dependencies(self, tx: BaseTransaction) -> None: + """Update _existing_deps and _waiting_for with the dependencies.""" + for dep in tx.get_all_dependencies(): + if self.tx_storage.transaction_exists(dep) or dep in self._db: + self._existing_deps.add(dep) + else: + self._waiting_for.add(dep) + + def handle_transactions_end(self, response_code: StreamEnd) -> None: + """This method is 
called by the sync agent when a TRANSACTIONS-END message is received.""" + if self._deferred.called: + return + assert self._response_code is None + self._response_code = response_code + self.check_end() + + def check_end(self) -> None: + """Check if the streaming has ended.""" + if self._response_code is None: + return + + if self._queue: + return + + self.log.info('transactions streaming ended', reason=self._response_code, waiting_for=len(self._waiting_for)) + self._deferred.callback(self._response_code) + + @inlineCallbacks + def _execute_and_prepare_next(self) -> Generator[Any, Any, bool]: + """Add the block and its vertices to the DAG.""" + assert not self._waiting_for + + blk = self.partial_blocks[self._idx] + vertex_list = list(self._db.values()) + vertex_list.sort(key=lambda v: v.timestamp) + + try: + yield self.sync_agent.on_block_complete(blk, vertex_list) + except HathorError as e: + self.fails(InvalidVertexError(repr(e))) + return False + + self._idx += 1 + if self._idx >= len(self.partial_blocks): + return False + + self._prepare_block(self.partial_blocks[self._idx]) + return True + + def _prepare_block(self, blk: 'Block') -> None: + """Reset everything for the next block. It also adds blocks that have no dependencies.""" + self._waiting_for.clear() + self._db.clear() + self._existing_deps.clear() + + self._update_dependencies(blk) diff --git a/hathor/p2p/sync_version.py b/hathor/p2p/sync_version.py index 8db49918a..2a51293c5 100644 --- a/hathor/p2p/sync_version.py +++ b/hathor/p2p/sync_version.py @@ -22,7 +22,6 @@ class SyncVersion(Enum): # to no match different values and in turn not select a certain protocol, this can be done intentionally, for # example, peers using `v2-fake` (which just uses sync-v1) will not connect to peers using `v2-alpha`, and so # on. 
- V1 = 'v1' V1_1 = 'v1.1' V2 = 'v2' @@ -37,10 +36,7 @@ def get_priority(self) -> int: # XXX: these values are only used internally and in memory, there is no need to keep them consistency, for # example, if we need more granularity, we can just add a 0 to all values and use the values in between, # although this shouldn't really be necessary - if self is SyncVersion.V1: - # low priority - return 10 - elif self is SyncVersion.V1_1: + if self is SyncVersion.V1_1: return 11 elif self is SyncVersion.V2: return 20 @@ -48,7 +44,7 @@ def get_priority(self) -> int: raise ValueError('value is either invalid for this enum or not implemented') def is_v1(self) -> bool: - """Return True for V1 and V1_1.""" + """Return True for V1_1.""" return self.get_priority() < 20 # XXX: total_ordering decorator will implement the other methods: __le__, __gt__, and __ge__ diff --git a/hathor/p2p/utils.py b/hathor/p2p/utils.py index 6904da0a7..12509ffc4 100644 --- a/hathor/p2p/utils.py +++ b/hathor/p2p/utils.py @@ -14,7 +14,7 @@ import datetime import re -from typing import Any, Generator, Optional +from typing import Any, Optional from urllib.parse import parse_qs, urlparse import requests @@ -25,7 +25,6 @@ from cryptography.hazmat.primitives.serialization import load_pem_private_key from cryptography.x509 import Certificate from cryptography.x509.oid import NameOID -from twisted.internet.defer import inlineCallbacks from twisted.internet.interfaces import IAddress from hathor.conf.get_settings import get_settings @@ -87,7 +86,7 @@ def get_settings_hello_dict() -> dict[str, Any]: for key in settings.P2P_SETTINGS_HASH_FIELDS: value = getattr(settings, key) # We are going to json.dumps this dict, so we can't have bytes here - if type(value) == bytes: + if type(value) is bytes: value = value.hex() settings_dict[key] = value return settings_dict @@ -100,15 +99,14 @@ def connection_string_to_host(connection_string: str) -> str: return urlparse(connection_string).netloc.split(':')[0] 
-@inlineCallbacks -def discover_dns(host: str, test_mode: int = 0) -> Generator[Any, Any, list[str]]: +async def discover_dns(host: str, test_mode: int = 0) -> list[str]: """ Start a DNS peer discovery object and execute a search for the host Returns the DNS string from the requested host E.g., localhost -> tcp://127.0.0.1:40403 """ discovery = DNSPeerDiscovery([], test_mode=test_mode) - result = yield discovery.dns_seed_lookup(host) + result = await discovery.dns_seed_lookup(host) return result diff --git a/hathor/prometheus.py b/hathor/prometheus.py index 6bd9637d1..63c1a7727 100644 --- a/hathor/prometheus.py +++ b/hathor/prometheus.py @@ -19,7 +19,7 @@ from twisted.internet.task import LoopingCall from hathor.conf.get_settings import get_settings -from hathor.util import reactor +from hathor.reactor import get_global_reactor if TYPE_CHECKING: from hathor.metrics import Metrics @@ -102,7 +102,7 @@ def __init__(self, metrics: 'Metrics', path: str, filename: str = 'hathor.prom', # A timer to periodically write data to prometheus self._lc_write_data = LoopingCall(self._write_data) - self._lc_write_data.clock = reactor + self._lc_write_data.clock = get_global_reactor() def _initial_setup(self) -> None: """ Start a collector registry to send data to node exporter diff --git a/hathor/pubsub.py b/hathor/pubsub.py index b9c5506c3..0a3168aa7 100644 --- a/hathor/pubsub.py +++ b/hathor/pubsub.py @@ -14,17 +14,20 @@ from collections import defaultdict, deque from enum import Enum -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any, Callable, Optional -from twisted.internet.interfaces import IReactorFromThreads +from structlog import get_logger +from twisted.internet.interfaces import IDelayedCall, IReactorFromThreads from twisted.python.threadable import isInIOThread -from hathor.util import Reactor +from hathor.reactor import ReactorProtocol as Reactor from hathor.utils.zope import verified_cast if TYPE_CHECKING: from hathor.transaction 
import BaseTransaction, Block +logger = get_logger() + class HathorEvents(Enum): """ @@ -170,6 +173,9 @@ def __init__(self, reactor: Reactor) -> None: self._subscribers = defaultdict(list) self.queue: deque[tuple[PubSubCallable, HathorEvents, EventArguments]] = deque() self.reactor = reactor + self.log = logger.new() + + self._call_later_id: Optional[IDelayedCall] = None def subscribe(self, key: HathorEvents, fn: PubSubCallable) -> None: """Subscribe to a specific event. @@ -193,22 +199,36 @@ def _call_next(self) -> None: """Execute next call if it exists.""" if not self.queue: return - fn, key, args = self.queue.popleft() - fn(key, args) - if self.queue: + + self.log.debug('running pubsub call_next', len=len(self.queue)) + + try: + while self.queue: + fn, key, args = self.queue.popleft() + fn(key, args) + except Exception: + self.log.error('event processing failed', key=key, args=args) + raise + finally: self._schedule_call_next() def _schedule_call_next(self) -> None: """Schedule next call's execution.""" assert self.reactor.running + if not self.queue: + return + if not isInIOThread() and (threaded_reactor := verified_cast(IReactorFromThreads, self.reactor)): # We're taking a conservative approach, since not all functions might need to run # on the main thread [yan 2019-02-20] threaded_reactor.callFromThread(self._call_next) return - self.reactor.callLater(0, self._call_next) + if self._call_later_id and self._call_later_id.active(): + return + + self._call_later_id = self.reactor.callLater(0, self._call_next) def publish(self, key: HathorEvents, **kwargs: Any) -> None: """Publish a new event. 
@@ -224,7 +244,5 @@ def publish(self, key: HathorEvents, **kwargs: Any) -> None: if not self.reactor.running: fn(key, args) else: - is_empty = bool(not self.queue) self.queue.append((fn, key, args)) - if is_empty: - self._schedule_call_next() + self._schedule_call_next() diff --git a/hathor/reactor/__init__.py b/hathor/reactor/__init__.py index e69de29bb..d87649a98 100644 --- a/hathor/reactor/__init__.py +++ b/hathor/reactor/__init__.py @@ -0,0 +1,22 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor.reactor.reactor import get_global_reactor, initialize_global_reactor +from hathor.reactor.reactor_protocol import ReactorProtocol + +__all__ = [ + 'initialize_global_reactor', + 'get_global_reactor', + 'ReactorProtocol', +] diff --git a/hathor/reactor/reactor.py b/hathor/reactor/reactor.py index 10bda8e98..e94dc3a97 100644 --- a/hathor/reactor/reactor.py +++ b/hathor/reactor/reactor.py @@ -14,18 +14,72 @@ from typing import cast -from twisted.internet import reactor as twisted_reactor +from structlog import get_logger from twisted.internet.interfaces import IReactorCore, IReactorTCP, IReactorTime from zope.interface.verify import verifyObject from hathor.reactor.reactor_protocol import ReactorProtocol -assert verifyObject(IReactorTime, twisted_reactor) is True -assert verifyObject(IReactorCore, twisted_reactor) is True -assert verifyObject(IReactorTCP, twisted_reactor) is True +logger = get_logger() -""" -This variable is the global reactor that should be imported to use the Twisted reactor. -It's cast to ReactorProtocol, our own type that stubs the necessary Twisted zope interfaces, to aid typing. -""" -reactor = cast(ReactorProtocol, twisted_reactor) +# Internal variable that should NOT be accessed directly. +_reactor: ReactorProtocol | None = None + + +def get_global_reactor() -> ReactorProtocol: + """ + Get the global Twisted reactor. It should be the only way to get a reactor, other than using the instance that + is passed around (which should be the same instance as the one returned by this function). + + This function must NOT be called in the module-level, only inside other functions. + """ + global _reactor + + if _reactor is None: + raise Exception('The reactor is not initialized. Use `initialize_global_reactor()`.') + + return _reactor + + +def initialize_global_reactor(*, use_asyncio_reactor: bool = False) -> ReactorProtocol: + """ + Initialize the global Twisted reactor. Must only be called once.
+ This function must NOT be called in the module-level, only inside other functions. + """ + global _reactor + + if _reactor is not None: + log = logger.new() + log.warn('The reactor has already been initialized. Use `get_global_reactor()`.') + return _reactor + + if use_asyncio_reactor: + import asyncio + import sys + + from twisted.internet import asyncioreactor + from twisted.internet.error import ReactorAlreadyInstalledError + + if sys.platform == 'win32': + # See: https://docs.twistedmatrix.com/en/twisted-22.10.0/api/twisted.internet.asyncioreactor.AsyncioSelectorReactor.html # noqa: E501 + asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) + + try: + asyncioreactor.install(asyncio.get_event_loop()) + except ReactorAlreadyInstalledError as e: + msg = ( + "There's a Twisted reactor installed already. It's probably the default one, installed indirectly by " + "one of our imports. This can happen, for example, if we import from the hathor module in " + "entrypoint-level, like in CLI tools other than `RunNode`." + ) + raise Exception(msg) from e + + from twisted.internet import reactor as twisted_reactor + + assert verifyObject(IReactorTime, twisted_reactor) is True + assert verifyObject(IReactorCore, twisted_reactor) is True + assert verifyObject(IReactorTCP, twisted_reactor) is True + + # We cast to ReactorProtocol, our own type that stubs the necessary Twisted zope interfaces, to aid typing. 
+ _reactor = cast(ReactorProtocol, twisted_reactor) + return _reactor diff --git a/hathor/simulator/fake_connection.py b/hathor/simulator/fake_connection.py index 663103ff6..a2170d233 100644 --- a/hathor/simulator/fake_connection.py +++ b/hathor/simulator/fake_connection.py @@ -71,13 +71,16 @@ def disable_idle_timeout(self): self._proto1.disable_idle_timeout() self._proto2.disable_idle_timeout() - def is_both_synced(self) -> bool: + def is_both_synced(self, *, errmsgs: Optional[list[str]] = None) -> bool: """Short-hand check that can be used to make "step loops" without having to guess the number of iterations.""" + if errmsgs is None: + errmsgs = [] from hathor.p2p.states.ready import ReadyState conn1_aborting = self._proto1.aborting conn2_aborting = self._proto2.aborting if conn1_aborting or conn2_aborting: self.log.debug('conn aborting', conn1_aborting=conn1_aborting, conn2_aborting=conn2_aborting) + errmsgs.append('conn aborting') return False state1 = self._proto1.state state2 = self._proto2.state @@ -85,6 +88,7 @@ def is_both_synced(self) -> bool: state2_is_ready = isinstance(state2, ReadyState) if not state1_is_ready or not state2_is_ready: self.log.debug('peer not ready', peer1_ready=state1_is_ready, peer2_ready=state2_is_ready) + errmsgs.append('peer not ready') return False assert isinstance(state1, ReadyState) # mypy can't infer this from the above assert isinstance(state2, ReadyState) # mypy can't infer this from the above @@ -92,21 +96,25 @@ def is_both_synced(self) -> bool: state2_is_errored = state2.sync_agent.is_errored() if state1_is_errored or state2_is_errored: self.log.debug('peer errored', peer1_errored=state1_is_errored, peer2_errored=state2_is_errored) + errmsgs.append('peer errored') return False state1_is_synced = state1.sync_agent.is_synced() state2_is_synced = state2.sync_agent.is_synced() if not state1_is_synced or not state2_is_synced: self.log.debug('peer not synced', peer1_synced=state1_is_synced, peer2_synced=state2_is_synced) + 
errmsgs.append('peer not synced') return False [best_block_info1] = state1.protocol.node.tx_storage.get_n_height_tips(1) [best_block_info2] = state2.protocol.node.tx_storage.get_n_height_tips(1) if best_block_info1.id != best_block_info2.id: self.log.debug('best block is different') + errmsgs.append('best block is different') return False tips1 = {i.data for i in state1.protocol.node.tx_storage.get_tx_tips()} tips2 = {i.data for i in state2.protocol.node.tx_storage.get_tx_tips()} if tips1 != tips2: self.log.debug('tx tips are different') + errmsgs.append('tx tips are different') return False return True diff --git a/hathor/simulator/miner/geometric_miner.py b/hathor/simulator/miner/geometric_miner.py index 5a2173287..2dc8209d6 100644 --- a/hathor/simulator/miner/geometric_miner.py +++ b/hathor/simulator/miner/geometric_miner.py @@ -16,6 +16,7 @@ from typing import TYPE_CHECKING, Optional from hathor.conf.get_settings import get_settings +from hathor.exception import BlockTemplateTimestampError from hathor.manager import HathorEvents from hathor.simulator.miner.abstract_miner import AbstractMiner from hathor.util import Random @@ -96,13 +97,19 @@ def _schedule_next_block(self): self._block = None if self._manager.can_start_mining(): - block = self._generate_mining_block() - geometric_p = 2**(-block.weight) - trials = self._rng.geometric(geometric_p) - dt = 1.0 * trials / self._hashpower - self._block = block - self.log.debug('randomized step: start mining new block', dt=dt, parents=[h.hex() for h in block.parents], - block_timestamp=block.timestamp) + try: + block = self._generate_mining_block() + except BlockTemplateTimestampError: + dt = 5 # Try again in 5 seconds. 
+ else: + geometric_p = 2**(-block.weight) + trials = self._rng.geometric(geometric_p) + dt = 1.0 * trials / self._hashpower + self._block = block + self.log.debug('randomized step: start mining new block', + dt=dt, + parents=[h.hex() for h in block.parents], + block_timestamp=block.timestamp) else: dt = 60 diff --git a/hathor/simulator/patches.py b/hathor/simulator/patches.py new file mode 100644 index 000000000..3c056249e --- /dev/null +++ b/hathor/simulator/patches.py @@ -0,0 +1,37 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Optional + +from structlog import get_logger + +from hathor.mining.cpu_mining_service import CpuMiningService +from hathor.transaction import BaseTransaction +from hathor.verification.vertex_verifier import VertexVerifier + +logger = get_logger() + + +class SimulatorVertexVerifier(VertexVerifier): + @classmethod + def verify_pow(cls, vertex: BaseTransaction, *, override_weight: Optional[float] = None) -> None: + assert vertex.hash is not None + logger.new().debug('Skipping VertexVerifier.verify_pow() for simulator') + + +class SimulatorCpuMiningService(CpuMiningService): + def resolve(self, vertex: BaseTransaction, *, update_time: bool = False) -> bool: + vertex.update_hash() + logger.new().debug('Skipping CpuMiningService.resolve() for simulator') + return True diff --git a/hathor/simulator/simulator.py b/hathor/simulator/simulator.py index 27507baf9..b6c546a3f 100644 --- a/hathor/simulator/simulator.py +++ b/hathor/simulator/simulator.py @@ -22,13 +22,17 @@ from hathor.builder import BuildArtifacts, Builder from hathor.conf.get_settings import get_settings -from hathor.daa import TestMode, _set_test_mode +from hathor.conf.settings import HathorSettings +from hathor.daa import DifficultyAdjustmentAlgorithm +from hathor.feature_activation.feature_service import FeatureService from hathor.manager import HathorManager from hathor.p2p.peer_id import PeerId from hathor.simulator.clock import HeapClock, MemoryReactorHeapClock from hathor.simulator.miner.geometric_miner import GeometricMiner +from hathor.simulator.patches import SimulatorCpuMiningService, SimulatorVertexVerifier from hathor.simulator.tx_generator import RandomTransactionGenerator from hathor.util import Random +from hathor.verification.vertex_verifiers import VertexVerifiers from hathor.wallet import HDWallet if TYPE_CHECKING: @@ -40,82 +44,17 @@ DEFAULT_STEP_INTERVAL: float = 0.25 DEFAULT_STATUS_INTERVAL: float = 60.0 +SIMULATOR_AVG_TIME_BETWEEN_BLOCKS: int = 64 class Simulator: - 
# used to concilite monkeypatching and multiple instances - _patches_rc: int = 0 - - @classmethod - def _apply_patches(cls): - """ Applies global patches on modules that aren't easy/possible to configure otherwise. - - Patches: - - - disable pow verification - - disable Transaction.resolve method - - set DAA test-mode to DISABLED (will actually run the pow function, that won't actually verify the pow) - - override AVG_TIME_BETWEEN_BLOCKS to 64 - """ - from hathor.transaction import BaseTransaction - - def verify_pow(self: BaseTransaction, *args: Any, **kwargs: Any) -> None: - assert self.hash is not None - logger.new().debug('Skipping BaseTransaction.verify_pow() for simulator') - - def resolve(self: BaseTransaction, update_time: bool = True) -> bool: - self.update_hash() - logger.new().debug('Skipping BaseTransaction.resolve() for simulator') - return True - - cls._original_verify_pow = BaseTransaction.verify_pow - BaseTransaction.verify_pow = verify_pow - - cls._original_resolve = BaseTransaction.resolve - BaseTransaction.resolve = resolve - - _set_test_mode(TestMode.DISABLED) - - from hathor import daa - cls._original_avg_time_between_blocks = daa.AVG_TIME_BETWEEN_BLOCKS - daa.AVG_TIME_BETWEEN_BLOCKS = 64 - - @classmethod - def _remove_patches(cls): - """ Remove the patches previously applied. 
- """ - from hathor.transaction import BaseTransaction - BaseTransaction.verify_pow = cls._original_verify_pow - BaseTransaction.resolve = cls._original_resolve - - from hathor import daa - daa.AVG_TIME_BETWEEN_BLOCKS = cls._original_avg_time_between_blocks - - @classmethod - def _patches_rc_increment(cls): - """ This is used by when starting instances of Simulator to determine when to run _apply_patches""" - assert cls._patches_rc >= 0 - cls._patches_rc += 1 - if cls._patches_rc == 1: - # patches not yet applied - cls._apply_patches() - - @classmethod - def _patches_rc_decrement(cls): - """ This is used by when stopping instances of Simulator to determine when to run _remove_patches""" - assert cls._patches_rc > 0 - cls._patches_rc -= 1 - if cls._patches_rc == 0: - # patches not needed anymore - cls._remove_patches() - def __init__(self, seed: Optional[int] = None): self.log = logger.new() if seed is None: seed = secrets.randbits(64) self.seed = seed self.rng = Random(self.seed) - self.settings = get_settings() + self.settings = get_settings()._replace(AVG_TIME_BETWEEN_BLOCKS=SIMULATOR_AVG_TIME_BETWEEN_BLOCKS) self._network = 'testnet' self._clock = MemoryReactorHeapClock() self._peers: OrderedDict[str, HathorManager] = OrderedDict() @@ -126,7 +65,6 @@ def start(self) -> None: """Has to be called before any other method can be called.""" assert not self._started self._started = True - self._patches_rc_increment() first_timestamp = self.settings.GENESIS_BLOCK_TIMESTAMP dt = self.rng.randint(3600, 120 * 24 * 3600) self._clock.advance(first_timestamp + dt) @@ -136,7 +74,6 @@ def stop(self) -> None: """Can only stop after calling start, but it doesn't matter if it's paused or not""" assert self._started self._started = False - self._patches_rc_decrement() def get_default_builder(self) -> Builder: """ @@ -149,7 +86,8 @@ def get_default_builder(self) -> Builder: .enable_full_verification() \ .enable_sync_v1() \ .enable_sync_v2() \ - .use_memory() + .use_memory() \ + 
.set_settings(self.settings) def create_peer(self, builder: Optional[Builder] = None) -> HathorManager: """ @@ -170,10 +108,16 @@ def create_artifacts(self, builder: Optional[Builder] = None) -> BuildArtifacts: wallet = HDWallet(gap_limit=2) wallet._manually_initialize() + cpu_mining_service = SimulatorCpuMiningService() + daa = DifficultyAdjustmentAlgorithm(settings=self.settings) + artifacts = builder \ .set_reactor(self._clock) \ .set_rng(Random(self.rng.getrandbits(64))) \ .set_wallet(wallet) \ + .set_vertex_verifiers_builder(_build_vertex_verifiers) \ + .set_daa(daa) \ + .set_cpu_mining_service(cpu_mining_service) \ .build() artifacts.manager.start() @@ -297,3 +241,19 @@ def run(self, if trigger is not None: return False return True + + +def _build_vertex_verifiers( + settings: HathorSettings, + daa: DifficultyAdjustmentAlgorithm, + feature_service: FeatureService +) -> VertexVerifiers: + """ + A custom VertexVerifiers builder to be used by the simulator. + """ + return VertexVerifiers.create( + settings=settings, + vertex_verifier=SimulatorVertexVerifier(settings=settings, daa=daa), + daa=daa, + feature_service=feature_service, + ) diff --git a/hathor/simulator/trigger.py b/hathor/simulator/trigger.py index 5745523ce..a57844083 100644 --- a/hathor/simulator/trigger.py +++ b/hathor/simulator/trigger.py @@ -16,6 +16,9 @@ from typing import TYPE_CHECKING, Callable if TYPE_CHECKING: + from re import Match, Pattern + + from hathor.p2p.protocol import HathorLineReceiver from hathor.simulator.fake_connection import FakeConnection from hathor.simulator.miner import AbstractMiner from hathor.simulator.tx_generator import RandomTransactionGenerator @@ -107,3 +110,32 @@ def __init__(self, sub_triggers: list[Trigger]) -> None: def should_stop(self) -> bool: return all(trigger.should_stop() for trigger in self._sub_triggers) + + +class StopWhenSendLineMatch(Trigger): + """Stop the simulation when the node sends a line that matches a designated regex pattern. 
+ """ + + def __init__(self, protocol: 'HathorLineReceiver', regex: 'Pattern') -> None: + # patches protocol.sendLine + self.original_send_line = protocol.sendLine + setattr(protocol, 'sendLine', self._send_line_wrapper) + + # regex pattern + self.regex = regex + + # list of matches + self.matches: list['Match'] = [] + + def _send_line_wrapper(self, line: str) -> None: + """Check if line matches a designated regex pattern.""" + self.original_send_line(line) + match = self.regex.match(line) + if match: + self.matches.append(match) + + def should_stop(self) -> bool: + if self.matches: + self.matches = [] + return True + return False diff --git a/hathor/simulator/tx_generator.py b/hathor/simulator/tx_generator.py index 6bb76c1a8..347721d5b 100644 --- a/hathor/simulator/tx_generator.py +++ b/hathor/simulator/tx_generator.py @@ -17,12 +17,11 @@ from structlog import get_logger -from hathor import daa from hathor.conf.get_settings import get_settings +from hathor.simulator.utils import NoCandidatesError, gen_new_double_spending, gen_new_tx from hathor.transaction.exceptions import RewardLocked from hathor.util import Random from hathor.wallet.exceptions import InsufficientFunds -from tests.utils import NoCandidatesError, gen_new_double_spending, gen_new_tx if TYPE_CHECKING: from hathor.manager import HathorManager @@ -128,7 +127,7 @@ def new_tx_step1(self): self.delayedcall = self.clock.callLater(0, self.schedule_next_transaction) return - tx.weight = daa.minimum_tx_weight(tx) + tx.weight = self.manager.daa.minimum_tx_weight(tx) tx.update_hash() geometric_p = 2**(-tx.weight) diff --git a/hathor/simulator/utils.py b/hathor/simulator/utils.py new file mode 100644 index 000000000..863bbfbdb --- /dev/null +++ b/hathor/simulator/utils.py @@ -0,0 +1,183 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Optional, cast + +from hathor.crypto.util import decode_address +from hathor.manager import HathorManager +from hathor.transaction import Block, Transaction +from hathor.types import Address, VertexId + + +def gen_new_tx(manager: HathorManager, address: str, value: int, verify: bool = True) -> Transaction: + """ + Generate and return a new transaction. + + Args: + manager: the HathorManager to generate the transaction for + address: an address for the transaction's output + value: a value for the transaction's output + verify: whether to verify the generated transaction + + Returns: the generated transaction. 
+ """ + from hathor.transaction import Transaction + from hathor.wallet.base_wallet import WalletOutputInfo + + outputs = [] + outputs.append(WalletOutputInfo(address=decode_address(address), value=int(value), timelock=None)) + + assert manager.wallet is not None + tx = manager.wallet.prepare_transaction_compute_inputs(Transaction, outputs, manager.tx_storage) + tx.storage = manager.tx_storage + + max_ts_spent_tx = max(tx.get_spent_tx(txin).timestamp for txin in tx.inputs) + tx.timestamp = max(max_ts_spent_tx + 1, int(manager.reactor.seconds())) + + tx.weight = 1 + tx.parents = manager.get_new_tx_parents(tx.timestamp) + manager.cpu_mining_service.resolve(tx) + if verify: + manager.verification_service.verify(tx) + return tx + + +def add_new_blocks( + manager: HathorManager, + num_blocks: int, + advance_clock: Optional[int] = None, + *, + parent_block_hash: Optional[VertexId] = None, + block_data: bytes = b'', + weight: Optional[float] = None, + address: Optional[Address] = None, + signal_bits: int | None = None, +) -> list[Block]: + """ Create, resolve and propagate some blocks + + :param manager: Manager object to handle the creation + :type manager: :py:class:`hathor.manager.HathorManager` + + :param num_blocks: Quantity of blocks to be created + :type num_blocks: int + + :return: Blocks created + :rtype: list[Block] + """ + blocks = [] + for _ in range(num_blocks): + blocks.append( + add_new_block(manager, advance_clock, parent_block_hash=parent_block_hash, + data=block_data, weight=weight, address=address, signal_bits=signal_bits) + ) + if parent_block_hash: + parent_block_hash = blocks[-1].hash + return blocks + + +def add_new_block( + manager: HathorManager, + advance_clock: Optional[int] = None, + *, + parent_block_hash: Optional[VertexId] = None, + data: bytes = b'', + weight: Optional[float] = None, + address: Optional[Address] = None, + propagate: bool = True, + signal_bits: int | None = None, +) -> Block: + """ Create, resolve and propagate a new block + 
+ :param manager: Manager object to handle the creation + :type manager: :py:class:`hathor.manager.HathorManager` + + :return: Block created + :rtype: :py:class:`hathor.transaction.block.Block` + """ + block = manager.generate_mining_block(parent_block_hash=parent_block_hash, data=data, address=address) + if weight is not None: + block.weight = weight + if signal_bits is not None: + block.signal_bits = signal_bits + manager.cpu_mining_service.resolve(block) + manager.verification_service.validate_full(block) + if propagate: + manager.propagate_tx(block, fails_silently=False) + if advance_clock: + assert hasattr(manager.reactor, 'advance') + manager.reactor.advance(advance_clock) + return block + + +class NoCandidatesError(Exception): + pass + + +def gen_new_double_spending(manager: HathorManager, *, use_same_parents: bool = False, + tx: Optional[Transaction] = None, weight: float = 1) -> Transaction: + """ + Generate and return a double spending transaction. + + Args: + manager: the HathorManager to generate the transaction for + use_same_parents: whether to use the same parents as the original transaction + tx: the original transaction do double spend + weight: the new transaction's weight + + Returns: the double spending transaction. 
+ """ + if tx is None: + tx_candidates = manager.get_new_tx_parents() + genesis = manager.tx_storage.get_all_genesis() + genesis_txs = [tx for tx in genesis if not tx.is_block] + # XXX: it isn't possible to double-spend a genesis transaction, thus we remove it from tx_candidates + for genesis_tx in genesis_txs: + if genesis_tx.hash in tx_candidates: + tx_candidates.remove(genesis_tx.hash) + if not tx_candidates: + raise NoCandidatesError() + # assert tx_candidates, 'Must not be empty, otherwise test was wrongly set up' + tx_hash = manager.rng.choice(tx_candidates) + tx = cast(Transaction, manager.tx_storage.get_transaction(tx_hash)) + + txin = manager.rng.choice(tx.inputs) + + from hathor.transaction.scripts import P2PKH, parse_address_script + spent_tx = tx.get_spent_tx(txin) + spent_txout = spent_tx.outputs[txin.index] + p2pkh = parse_address_script(spent_txout.script) + assert isinstance(p2pkh, P2PKH) + + from hathor.wallet.base_wallet import WalletInputInfo, WalletOutputInfo + value = spent_txout.value + wallet = manager.wallet + assert wallet is not None + private_key = wallet.get_private_key(p2pkh.address) + inputs = [WalletInputInfo(tx_id=txin.tx_id, index=txin.index, private_key=private_key)] + + address = wallet.get_unused_address(mark_as_used=True) + outputs = [WalletOutputInfo(address=decode_address(address), value=int(value), timelock=None)] + + tx2 = wallet.prepare_transaction(Transaction, inputs, outputs) + tx2.storage = manager.tx_storage + tx2.weight = weight + tx2.timestamp = max(tx.timestamp + 1, int(manager.reactor.seconds())) + + if use_same_parents: + tx2.parents = list(tx.parents) + else: + tx2.parents = manager.get_new_tx_parents(tx2.timestamp) + + manager.cpu_mining_service.resolve(tx2) + return tx2 diff --git a/hathor/stratum/stratum.py b/hathor/stratum/stratum.py index 6abc2dfbd..5d4085569 100644 --- a/hathor/stratum/stratum.py +++ b/hathor/stratum/stratum.py @@ -38,9 +38,11 @@ from hathor.exception import InvalidNewTransaction from 
hathor.p2p.utils import format_address from hathor.pubsub import EventArguments, HathorEvents +from hathor.reactor import ReactorProtocol as Reactor from hathor.transaction import BaseTransaction, BitcoinAuxPow, Block, MergeMinedBlock, Transaction, sum_weights from hathor.transaction.exceptions import PowError, ScriptError, TxValidationError -from hathor.util import Reactor, json_dumpb, json_loadb, reactor +from hathor.util import json_dumpb, json_loadb +from hathor.verification.vertex_verifier import VertexVerifier from hathor.wallet.exceptions import InvalidAddress if TYPE_CHECKING: @@ -525,8 +527,10 @@ def handle_submit(self, params: dict, msgid: Optional[str]) -> None: self.log.debug('share received', block=tx, block_base=block_base.hex(), block_base_hash=block_base_hash.hex()) + verifier = VertexVerifier(settings=self._settings, daa=self.manager.daa) + try: - tx.verify_pow(job.weight) + verifier.verify_pow(tx, override_weight=job.weight) except PowError: self.log.error('bad share, discard', job_weight=job.weight, tx=tx) return self.send_error(INVALID_SOLUTION, msgid, { @@ -542,7 +546,7 @@ def handle_submit(self, params: dict, msgid: Optional[str]) -> None: self.manager.reactor.callLater(0, self.job_request) try: - tx.verify_pow() + verifier.verify_pow(tx) except PowError: # Transaction pow was not enough, but the share was succesfully submited self.log.info('high hash, keep mining', tx=tx) @@ -732,7 +736,7 @@ class StratumFactory(ServerFactory): mined_txs: dict[bytes, Transaction] deferreds_tx: dict[bytes, Deferred] - def __init__(self, manager: 'HathorManager', reactor: Reactor = reactor): + def __init__(self, manager: 'HathorManager', reactor: Reactor): self.log = logger.new() self.manager = manager self.reactor = reactor @@ -821,7 +825,7 @@ class StratumClient(JSONRPC): address: Optional[bytes] - def __init__(self, proc_count: Optional[int] = None, address: Optional[bytes] = None, + def __init__(self, reactor: Reactor, proc_count: Optional[int] = None, 
address: Optional[bytes] = None, id_generator: Optional[Callable[[], Iterator[Union[str, int]]]] = lambda: count()): self.log = logger.new() self.job_data = MinerJob() @@ -833,24 +837,23 @@ def __init__(self, proc_count: Optional[int] = None, address: Optional[bytes] = self.loop = None self.address = address self._iter_id = id_generator and id_generator() or None + self.reactor = reactor def _next_id(self): if self._iter_id: return str(next(self._iter_id)) - def start(self, clock: Optional[Reactor] = None) -> None: + def start(self) -> None: """ Starts the client, instantiating mining processes and scheduling miner supervisor calls. """ - if clock is None: - clock = reactor args = (self.job_data, self.signal, self.queue) proc_count = self.proc_count or cast(int, cpu_count()) self.signal.value = self.SLEEP self.miners = [Process(target=miner_job, args=(i, proc_count, *args)) for i in range(proc_count)] self.loop = task.LoopingCall(supervisor_job, self) - self.loop.clock = clock + self.loop.clock = self.reactor self.loop.start(self.SUPERVISOR_LOOP_INTERVAL) for miner in self.miners: diff --git a/hathor/sysctl/p2p/manager.py b/hathor/sysctl/p2p/manager.py index ed2d4f606..2cfe291a6 100644 --- a/hathor/sysctl/p2p/manager.py +++ b/hathor/sysctl/p2p/manager.py @@ -15,6 +15,7 @@ import os from hathor.p2p.manager import ConnectionsManager +from hathor.p2p.sync_version import SyncVersion from hathor.sysctl.exception import SysctlException from hathor.sysctl.sysctl import Sysctl @@ -32,6 +33,26 @@ def parse_text(text: str) -> list[str]: return ret +def parse_sync_version(name: str) -> SyncVersion: + match name.strip(): + case 'v1': + return SyncVersion.V1_1 + case 'v2': + return SyncVersion.V2 + case _: + raise ValueError('unknown or not implemented') + + +def pretty_sync_version(sync_version: SyncVersion) -> str: + match sync_version: + case SyncVersion.V1_1: + return 'v1' + case SyncVersion.V2: + return 'v2' + case _: + raise ValueError('unknown or not implemented') + + 
class ConnectionsManagerSysctl(Sysctl): def __init__(self, connections: ConnectionsManager) -> None: super().__init__() @@ -67,6 +88,16 @@ def __init__(self, connections: ConnectionsManager) -> None: None, self.set_always_enable_sync_readtxt, ) + self.register( + 'available_sync_versions', + self.get_available_sync_verions, + None, + ) + self.register( + 'enabled_sync_versions', + self.get_enabled_sync_versions, + self.set_enabled_sync_versions, + ) def set_force_sync_rotate(self) -> None: """Force a sync rotate.""" @@ -134,3 +165,34 @@ def set_max_enabled_sync(self, value: int) -> None: return self.connections.MAX_ENABLED_SYNC = value self.connections._sync_rotate_if_needed(force=True) + + def get_available_sync_verions(self) -> list[str]: + """Return the list of AVAILABLE sync versions.""" + return sorted(map(pretty_sync_version, self.connections.get_available_sync_versions())) + + def get_enabled_sync_versions(self) -> list[str]: + """Return the list of ENABLED sync versions.""" + return sorted(map(pretty_sync_version, self.connections.get_enabled_sync_versions())) + + def set_enabled_sync_versions(self, sync_versions: list[str]) -> None: + """Set the list of ENABLED sync versions.""" + new_sync_versions = set(map(parse_sync_version, sync_versions)) + old_sync_versions = self.connections.get_enabled_sync_versions() + to_enable = new_sync_versions - old_sync_versions + to_disable = old_sync_versions - new_sync_versions + for sync_version in to_enable: + self._enable_sync_version(sync_version) + for sync_version in to_disable: + self._disable_sync_version(sync_version) + + def _enable_sync_version(self, sync_version: SyncVersion) -> None: + """Enable the given sync version, it must be available, otherwise it will fail silently.""" + if not self.connections.is_sync_version_available(sync_version): + self.connections.log.warn('tried to enable a sync version through sysctl, but it is not available', + sync_version=sync_version) + return + 
self.connections.enable_sync_version(sync_version) + + def _disable_sync_version(self, sync_version: SyncVersion) -> None: + """Disable the given sync version.""" + self.connections.disable_sync_version(sync_version) diff --git a/hathor/sysctl/runner.py b/hathor/sysctl/runner.py index ef75a21b6..6ee32cb7b 100644 --- a/hathor/sysctl/runner.py +++ b/hathor/sysctl/runner.py @@ -76,7 +76,7 @@ def deserialize(self, value_str: str) -> Any: if len(value_str) == 0: return () - parts = [x.strip() for x in value_str.split(',')] + parts = json.loads(f'[{value_str}]') if len(parts) > 1: - return tuple(json.loads(x) for x in parts) + return tuple(parts) return json.loads(value_str) diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index ea185893e..453deab09 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -22,25 +22,13 @@ from itertools import chain from math import inf, isfinite, log from struct import error as StructError, pack -from typing import TYPE_CHECKING, Any, Callable, ClassVar, Iterator, Optional +from typing import TYPE_CHECKING, Any, ClassVar, Iterator, Optional from structlog import get_logger from hathor.checkpoint import Checkpoint from hathor.conf.get_settings import get_settings -from hathor.transaction.exceptions import ( - DuplicatedParents, - IncorrectParents, - InvalidOutputScriptSize, - InvalidOutputValue, - InvalidToken, - ParentDoesNotExist, - PowError, - TimestampError, - TooManyOutputs, - TooManySigOps, - WeightError, -) +from hathor.transaction.exceptions import InvalidOutputValue, WeightError from hathor.transaction.transaction_metadata import TransactionMetadata from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len from hathor.transaction.validation_state import ValidationState @@ -54,8 +42,6 @@ logger = get_logger() -MAX_NONCE = 2**32 - MAX_OUTPUT_VALUE = 2**63 # max value (inclusive) that is possible to encode: 
9223372036854775808 ~= 9.22337e+18 _MAX_OUTPUT_VALUE_32 = 2**31 - 1 # max value (inclusive) before having to use 8 bytes: 2147483647 ~= 2.14748e+09 @@ -70,14 +56,6 @@ # Weight (d), timestamp (I), and parents len (B) _GRAPH_FORMAT_STRING = '!dIB' -# tx should have 2 parents, both other transactions -_TX_PARENTS_TXS = 2 -_TX_PARENTS_BLOCKS = 0 - -# blocks have 3 parents, 2 txs and 1 block -_BLOCK_PARENTS_TXS = 2 -_BLOCK_PARENTS_BLOCKS = 1 - # The int value of one byte _ONE_BYTE = 0xFF @@ -161,7 +139,7 @@ def __init__(self, nonce: int = 0, timestamp: Optional[int] = None, signal_bits: int = 0, - version: int = TxVersion.REGULAR_BLOCK, + version: TxVersion = TxVersion.REGULAR_BLOCK, weight: float = 0, inputs: Optional[list['TxInput']] = None, outputs: Optional[list['TxOutput']] = None, @@ -540,157 +518,6 @@ def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None: To be implemented by tx/block, used by `self.validate_checkpoint`. Should not modify the validation state.""" raise NotImplementedError - def verify_parents(self) -> None: - """All parents must exist and their timestamps must be smaller than ours. - - Also, txs should have 2 other txs as parents, while blocks should have 2 txs + 1 block. - - Parents must be ordered with blocks first, followed by transactions. 
- - :raises TimestampError: when our timestamp is less or equal than our parent's timestamp - :raises ParentDoesNotExist: when at least one of our parents does not exist - :raises IncorrectParents: when tx does not confirm the correct number/type of parent txs - """ - from hathor.transaction.storage.exceptions import TransactionDoesNotExist - - assert self.storage is not None - - # check if parents are duplicated - parents_set = set(self.parents) - if len(self.parents) > len(parents_set): - raise DuplicatedParents('Tx has duplicated parents: {}', [tx_hash.hex() for tx_hash in self.parents]) - - my_parents_txs = 0 # number of tx parents - my_parents_blocks = 0 # number of block parents - min_timestamp: Optional[int] = None - - for parent_hash in self.parents: - try: - parent = self.storage.get_transaction(parent_hash) - assert parent.hash is not None - if self.timestamp <= parent.timestamp: - raise TimestampError('tx={} timestamp={}, parent={} timestamp={}'.format( - self.hash_hex, - self.timestamp, - parent.hash_hex, - parent.timestamp, - )) - - if parent.is_block: - if self.is_block and not parent.is_genesis: - if self.timestamp - parent.timestamp > self._settings.MAX_DISTANCE_BETWEEN_BLOCKS: - raise TimestampError('Distance between blocks is too big' - ' ({} seconds)'.format(self.timestamp - parent.timestamp)) - if my_parents_txs > 0: - raise IncorrectParents('Parents which are blocks must come before transactions') - for pi_hash in parent.parents: - pi = self.storage.get_transaction(parent_hash) - if not pi.is_block: - min_timestamp = ( - min(min_timestamp, pi.timestamp) if min_timestamp is not None - else pi.timestamp - ) - my_parents_blocks += 1 - else: - if min_timestamp and parent.timestamp < min_timestamp: - raise TimestampError('tx={} timestamp={}, parent={} timestamp={}, min_timestamp={}'.format( - self.hash_hex, - self.timestamp, - parent.hash_hex, - parent.timestamp, - min_timestamp - )) - my_parents_txs += 1 - except TransactionDoesNotExist: - raise 
ParentDoesNotExist('tx={} parent={}'.format(self.hash_hex, parent_hash.hex())) - - # check for correct number of parents - if self.is_block: - parents_txs = _BLOCK_PARENTS_TXS - parents_blocks = _BLOCK_PARENTS_BLOCKS - else: - parents_txs = _TX_PARENTS_TXS - parents_blocks = _TX_PARENTS_BLOCKS - if my_parents_blocks != parents_blocks: - raise IncorrectParents('wrong number of parents (block type): {}, expecting {}'.format( - my_parents_blocks, parents_blocks)) - if my_parents_txs != parents_txs: - raise IncorrectParents('wrong number of parents (tx type): {}, expecting {}'.format( - my_parents_txs, parents_txs)) - - def verify_pow(self, override_weight: Optional[float] = None) -> None: - """Verify proof-of-work - - :raises PowError: when the hash is equal or greater than the target - """ - assert self.hash is not None - numeric_hash = int(self.hash_hex, self.HEX_BASE) - minimum_target = self.get_target(override_weight) - if numeric_hash >= minimum_target: - raise PowError(f'Transaction has invalid data ({numeric_hash} < {minimum_target})') - - def verify_number_of_outputs(self) -> None: - """Verify number of outputs does not exceeds the limit""" - if len(self.outputs) > self._settings.MAX_NUM_OUTPUTS: - raise TooManyOutputs('Maximum number of outputs exceeded') - - def verify_sigops_output(self) -> None: - """ Count sig operations on all outputs and verify that the total sum is below the limit - """ - from hathor.transaction.scripts import get_sigops_count - n_txops = 0 - - for tx_output in self.outputs: - n_txops += get_sigops_count(tx_output.script) - - if n_txops > self._settings.MAX_TX_SIGOPS_OUTPUT: - raise TooManySigOps('TX[{}]: Maximum number of sigops for all outputs exceeded ({})'.format( - self.hash_hex, n_txops)) - - def verify_outputs(self) -> None: - """Verify there are no hathor authority UTXOs and outputs are all positive - - :raises InvalidToken: when there's a hathor authority utxo - :raises InvalidOutputValue: output has negative value - :raises 
TooManyOutputs: when there are too many outputs - """ - self.verify_number_of_outputs() - for index, output in enumerate(self.outputs): - # no hathor authority UTXO - if (output.get_token_index() == 0) and output.is_token_authority(): - raise InvalidToken('Cannot have authority UTXO for hathor tokens: {}'.format( - output.to_human_readable())) - - # output value must be positive - if output.value <= 0: - raise InvalidOutputValue('Output value must be a positive integer. Value: {} and index: {}'.format( - output.value, index)) - - if len(output.script) > self._settings.MAX_OUTPUT_SCRIPT_SIZE: - raise InvalidOutputScriptSize('size: {} and max-size: {}'.format( - len(output.script), self._settings.MAX_OUTPUT_SCRIPT_SIZE - )) - - def resolve(self, update_time: bool = False) -> bool: - """Run a CPU mining looking for the nonce that solves the proof-of-work - - The `self.weight` must be set before calling this method. - - :param update_time: update timestamp every 2 seconds - :return: True if a solution was found - :rtype: bool - """ - hash_bytes = self.start_mining(update_time=update_time) - - if hash_bytes: - self.hash = hash_bytes - metadata = getattr(self, '_metadata', None) - if metadata is not None and metadata.hash is not None: - metadata.hash = hash_bytes - return True - else: - return False - def get_funds_hash(self) -> bytes: """Return the sha256 of the funds part of the transaction @@ -760,41 +587,6 @@ def update_hash(self) -> None: """ self.hash = self.calculate_hash() - def start_mining(self, start: int = 0, end: int = MAX_NONCE, sleep_seconds: float = 0.0, update_time: bool = True, - *, should_stop: Callable[[], bool] = lambda: False) -> Optional[VertexId]: - """Starts mining until it solves the problem, i.e., finds the nonce that satisfies the conditions - - :param start: beginning of the search interval - :param end: end of the search interval - :param sleep_seconds: the number of seconds it will sleep after each attempt - :param update_time: update 
timestamp every 2 seconds - :return The hash of the solved PoW or None when it is not found - """ - pow_part1 = self.calculate_hash1() - target = self.get_target() - self.nonce = start - last_time = time.time() - while self.nonce < end: - if update_time: - now = time.time() - if now - last_time > 2: - if should_stop(): - return None - self.timestamp = int(now) - pow_part1 = self.calculate_hash1() - last_time = now - self.nonce = start - - result = self.calculate_hash2(pow_part1.copy()) - if int(result.hex(), self.HEX_BASE) < target: - return result - self.nonce += 1 - if sleep_seconds > 0: - time.sleep(sleep_seconds) - if should_stop(): - return None - return None - def get_metadata(self, *, force_reload: bool = False, use_storage: bool = True) -> TransactionMetadata: """Return this tx's metadata. @@ -823,12 +615,6 @@ def get_metadata(self, *, force_reload: bool = False, use_storage: bool = True) # happens include generating new mining blocks and some tests height = self.calculate_height() if self.storage else None score = self.weight if self.is_genesis else 0 - kwargs: dict[str, Any] = {} - - if self.is_block: - from hathor.transaction import Block - assert isinstance(self, Block) - kwargs['feature_activation_bit_counts'] = self.calculate_feature_activation_bit_counts() metadata = TransactionMetadata( hash=self.hash, @@ -836,7 +622,6 @@ def get_metadata(self, *, force_reload: bool = False, use_storage: bool = True) height=height, score=score, min_height=0, - **kwargs ) self._metadata = metadata if not metadata.hash: @@ -920,7 +705,6 @@ def update_initial_metadata(self, *, save: bool = True) -> None: self._update_height_metadata() self._update_parents_children_metadata() self._update_reward_lock_metadata() - self._update_feature_activation_bit_counts_metadata() if save: assert self.storage is not None self.storage.save_transaction(self, only_metadata=True) @@ -946,16 +730,6 @@ def _update_parents_children_metadata(self) -> None: metadata.children.append(self.hash) 
self.storage.save_transaction(parent, only_metadata=True) - def _update_feature_activation_bit_counts_metadata(self) -> None: - """Update the block feature_activation_bit_counts metadata.""" - if not self.is_block: - return - - from hathor.transaction import Block - assert isinstance(self, Block) - metadata = self.get_metadata() - metadata.feature_activation_bit_counts = self.calculate_feature_activation_bit_counts() - def update_timestamp(self, now: int) -> None: """Update this tx's timestamp @@ -1051,13 +825,13 @@ def serialize_output(tx: BaseTransaction, tx_out: TxOutput) -> dict[str, Any]: return ret - def clone(self) -> 'BaseTransaction': + def clone(self, *, include_metadata: bool = True) -> 'BaseTransaction': """Return exact copy without sharing memory, including metadata if loaded. :return: Transaction or Block copy """ new_tx = self.create_from_struct(self.get_struct()) - if hasattr(self, '_metadata'): + if hasattr(self, '_metadata') and include_metadata: assert self._metadata is not None # FIXME: is this actually true or do we have to check if not None new_tx._metadata = self._metadata.clone() new_tx.storage = self.storage diff --git a/hathor/transaction/block.py b/hathor/transaction/block.py index bef6f3368..80f9ee67d 100644 --- a/hathor/transaction/block.py +++ b/hathor/transaction/block.py @@ -18,21 +18,12 @@ from struct import pack from typing import TYPE_CHECKING, Any, Optional -from hathor import daa from hathor.checkpoint import Checkpoint from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.feature_state import FeatureState from hathor.profiler import get_cpu_profiler from hathor.transaction import BaseTransaction, TxOutput, TxVersion -from hathor.transaction.exceptions import ( - BlockWithInputs, - BlockWithTokensError, - CheckpointError, - InvalidBlockReward, - RewardLocked, - TransactionDataError, - WeightError, -) +from hathor.transaction.exceptions import CheckpointError from hathor.transaction.util 
import VerboseCallback, int_to_bytes, unpack, unpack_len from hathor.util import not_none from hathor.utils.int import get_bit_list @@ -56,7 +47,7 @@ def __init__(self, nonce: int = 0, timestamp: Optional[int] = None, signal_bits: int = 0, - version: int = TxVersion.REGULAR_BLOCK, + version: TxVersion = TxVersion.REGULAR_BLOCK, weight: float = 0, outputs: Optional[list[TxOutput]] = None, parents: Optional[list[bytes]] = None, @@ -119,21 +110,27 @@ def calculate_min_height(self) -> int: return min_height - def calculate_feature_activation_bit_counts(self) -> list[int]: + def get_feature_activation_bit_counts(self) -> list[int]: """ - Calculates the feature_activation_bit_counts metadata attribute, which is a list of feature activation bit - counts. + Lazily calculates the feature_activation_bit_counts metadata attribute, which is a list of feature activation + bit counts. After it's calculated for the first time, it's persisted in block metadata and must not be changed. Each list index corresponds to a bit position, and its respective value is the rolling count of active bits from the previous boundary block up to this block, including it. LSB is on the left. 
""" + metadata = self.get_metadata() + + if metadata.feature_activation_bit_counts is not None: + return metadata.feature_activation_bit_counts + previous_counts = self._get_previous_feature_activation_bit_counts() bit_list = self._get_feature_activation_bit_list() count_and_bit_pairs = zip_longest(previous_counts, bit_list, fillvalue=0) updated_counts = starmap(add, count_and_bit_pairs) + metadata.feature_activation_bit_counts = list(updated_counts) - return list(updated_counts) + return metadata.feature_activation_bit_counts def _get_previous_feature_activation_bit_counts(self) -> list[int]: """ @@ -337,55 +334,6 @@ def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None: # TODO: check whether self is a parent of any checkpoint-valid block, this is left for a future PR pass - def verify_weight(self) -> None: - """Validate minimum block difficulty.""" - block_weight = daa.calculate_block_difficulty(self) - if self.weight < block_weight - self._settings.WEIGHT_TOL: - raise WeightError(f'Invalid new block {self.hash_hex}: weight ({self.weight}) is ' - f'smaller than the minimum weight ({block_weight})') - - def verify_height(self) -> None: - """Validate that the block height is enough to confirm all transactions being confirmed.""" - meta = self.get_metadata() - assert meta.height is not None - assert meta.min_height is not None - if meta.height < meta.min_height: - raise RewardLocked(f'Block needs {meta.min_height} height but has {meta.height}') - - def verify_reward(self) -> None: - """Validate reward amount.""" - parent_block = self.get_block_parent() - tokens_issued_per_block = daa.get_tokens_issued_per_block(parent_block.get_height() + 1) - if self.sum_outputs != tokens_issued_per_block: - raise InvalidBlockReward( - f'Invalid number of issued tokens tag=invalid_issued_tokens tx.hash={self.hash_hex} ' - f'issued={self.sum_outputs} allowed={tokens_issued_per_block}' - ) - - def verify_no_inputs(self) -> None: - inputs = getattr(self, 'inputs', None) - 
if inputs: - raise BlockWithInputs('number of inputs {}'.format(len(inputs))) - - def verify_outputs(self) -> None: - super().verify_outputs() - for output in self.outputs: - if output.get_token_index() > 0: - raise BlockWithTokensError('in output: {}'.format(output.to_human_readable())) - - def verify_data(self) -> None: - if len(self.data) > self._settings.BLOCK_DATA_MAX_SIZE: - raise TransactionDataError('block data has {} bytes'.format(len(self.data))) - - def verify_without_storage(self) -> None: - """ Run all verifications that do not need a storage. - """ - self.verify_pow() - self.verify_no_inputs() - self.verify_outputs() - self.verify_data() - self.verify_sigops_output() - def get_base_hash(self) -> bytes: from hathor.merged_mining.bitcoin import sha256d_hash return sha256d_hash(self.get_header_without_nonce()) @@ -396,13 +344,6 @@ def get_height(self) -> int: assert meta.height is not None return meta.height - def get_feature_activation_bit_counts(self) -> list[int]: - """Returns the block's feature_activation_bit_counts metadata attribute.""" - metadata = self.get_metadata() - assert metadata.feature_activation_bit_counts is not None, 'Blocks must always have this attribute set.' - - return metadata.feature_activation_bit_counts - def _get_feature_activation_bit_list(self) -> list[int]: """ Extracts feature activation bits from the signal bits, as a list where each index corresponds to the bit @@ -430,15 +371,30 @@ def get_feature_state(self, *, feature: Feature) -> Optional[FeatureState]: return feature_states.get(feature) - def update_feature_state(self, *, feature: Feature, state: FeatureState) -> None: - """Updates the state of a feature in metadata and persists it.""" + def set_feature_state(self, *, feature: Feature, state: FeatureState, save: bool = False) -> None: + """ + Set the state of a feature in metadata, if it's not set. Fails if it's set and the value is different. + + Args: + feature: the feature to set the state of. 
+ state: the state to set. + save: whether to save this block's metadata in storage. + """ + previous_state = self.get_feature_state(feature=feature) + + if state == previous_state: + return + + assert previous_state is None assert self.storage is not None + metadata = self.get_metadata() feature_states = metadata.feature_states or {} feature_states[feature] = state metadata.feature_states = feature_states - self.storage.save_transaction(self, only_metadata=True) + if save: + self.storage.save_transaction(self, only_metadata=True) def get_feature_activation_bit_value(self, bit: int) -> int: """Get the feature activation bit value for a specific bit position.""" diff --git a/hathor/transaction/exceptions.py b/hathor/transaction/exceptions.py index 6c1a3eb56..25e61596c 100644 --- a/hathor/transaction/exceptions.py +++ b/hathor/transaction/exceptions.py @@ -146,6 +146,10 @@ class CheckpointError(BlockError): """Block hash does not match checkpoint hash for its height""" +class BlockMustSignalError(BlockError): + """Block does not signal support for a feature during mandatory signaling.""" + + class ScriptError(HathorError): """Base class for script evaluation errors""" diff --git a/hathor/transaction/merge_mined_block.py b/hathor/transaction/merge_mined_block.py index 121011a23..a0664d3ae 100644 --- a/hathor/transaction/merge_mined_block.py +++ b/hathor/transaction/merge_mined_block.py @@ -28,7 +28,7 @@ def __init__(self, nonce: int = 0, timestamp: Optional[int] = None, signal_bits: int = 0, - version: int = TxVersion.MERGE_MINED_BLOCK, + version: TxVersion = TxVersion.MERGE_MINED_BLOCK, weight: float = 0, outputs: Optional[list[TxOutput]] = None, parents: Optional[list[bytes]] = None, @@ -74,13 +74,3 @@ def to_json(self, decode_script: bool = False, include_metadata: bool = False) - del json['nonce'] json['aux_pow'] = bytes(self.aux_pow).hex() if self.aux_pow else None return json - - def verify_without_storage(self) -> None: - self.verify_aux_pow() - 
super().verify_without_storage() - - def verify_aux_pow(self) -> None: - """ Verify auxiliary proof-of-work (for merged mining). - """ - assert self.aux_pow is not None - self.aux_pow.verify(self.get_base_hash()) diff --git a/hathor/transaction/resources/create_tx.py b/hathor/transaction/resources/create_tx.py index 438d1f23d..897bd0ead 100644 --- a/hathor/transaction/resources/create_tx.py +++ b/hathor/transaction/resources/create_tx.py @@ -17,8 +17,8 @@ from hathor.api_util import Resource, set_cors from hathor.cli.openapi_files.register import register_resource from hathor.crypto.util import decode_address -from hathor.daa import minimum_tx_weight from hathor.exception import InvalidNewTransaction +from hathor.manager import HathorManager from hathor.transaction import Transaction, TxInput, TxOutput from hathor.transaction.scripts import create_output_script from hathor.util import api_catch_exceptions, json_dumpb, json_loadb @@ -50,7 +50,7 @@ class CreateTxResource(Resource): """ isLeaf = True - def __init__(self, manager): + def __init__(self, manager: HathorManager) -> None: # Important to have the manager so we can know the tx_storage self.manager = manager @@ -88,8 +88,8 @@ def render_POST(self, request): for tx_input in fake_signed_tx.inputs: # conservative estimate of the input data size to estimate a valid weight tx_input.data = b'\0' * 107 - tx.weight = minimum_tx_weight(fake_signed_tx) - tx.verify_unsigned_skip_pow() + tx.weight = self.manager.daa.minimum_tx_weight(fake_signed_tx) + self._verify_unsigned_skip_pow(tx) if tx.is_double_spending(): raise InvalidNewTransaction('At least one of your inputs has already been spent.') @@ -105,6 +105,21 @@ def render_POST(self, request): 'data': data, }) + def _verify_unsigned_skip_pow(self, tx: Transaction) -> None: + """ Same as .verify but skipping pow and signature verification.""" + assert type(tx) is Transaction + verifiers = self.manager.verification_service.verifiers + 
verifiers.tx.verify_number_of_inputs(tx) + verifiers.vertex.verify_number_of_outputs(tx) + verifiers.vertex.verify_outputs(tx) + verifiers.tx.verify_output_token_indexes(tx) + verifiers.vertex.verify_sigops_output(tx) + verifiers.tx.verify_sigops_input(tx) + # need to run verify_inputs first to check if all inputs exist + verifiers.tx.verify_inputs(tx, skip_script=True) + verifiers.vertex.verify_parents(tx) + verifiers.tx.verify_sum(tx.get_complete_token_info()) + CreateTxResource.openapi = { '/create_tx': { diff --git a/hathor/transaction/scripts.py b/hathor/transaction/scripts.py deleted file mode 100644 index a3e73cea6..000000000 --- a/hathor/transaction/scripts.py +++ /dev/null @@ -1,1645 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import base64 -import datetime -import re -import struct -from abc import ABC, abstractmethod -from enum import IntEnum -from typing import Any, Callable, Generator, NamedTuple, Optional, Pattern, Union - -from cryptography.exceptions import InvalidSignature -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import ec - -from hathor.conf.get_settings import get_settings -from hathor.crypto.util import ( - decode_address, - get_address_b58_from_bytes, - get_address_b58_from_public_key_hash, - get_address_b58_from_redeem_script_hash, - get_hash160, - get_public_key_from_bytes_compressed, - is_pubkey_compressed, -) -from hathor.transaction import BaseTransaction, Transaction, TxInput -from hathor.transaction.exceptions import ( - DataIndexError, - EqualVerifyFailed, - FinalStackInvalid, - InvalidScriptError, - InvalidStackData, - MissingStackItems, - OracleChecksigFailed, - OutOfData, - ScriptError, - TimeLocked, - VerifyFailed, -) - -# XXX: Because the Stack is a heterogeneous list of bytes and int, and some OPs only work for when the stack has some -# or the other type, there are many places that require an assert to prevent the wrong type from being used, -# alternatives include: 1. only using `list[bytes]` and operations that work on `int` to build them from `bytes`, -# 2. using `bytearray` instead of `list[...]` and using type codes on the stack or at least value sizes on the -# stack and OPs should use the extra info accordingly 3. using some "in stack error" at least custom exceptions -# for signaling that an OP was applied on a wrongly typed stack. -Stack = list[Union[bytes, int, str]] - - -class ScriptExtras(NamedTuple): - tx: Transaction - txin: TxInput - spent_tx: BaseTransaction - - -class OpcodePosition(NamedTuple): - opcode: int - position: int - - -def re_compile(pattern: str) -> Pattern[bytes]: - """ Transform a given script pattern into a regular expression. 
- - The script pattern is like a regular expression, but you may include five - special symbols: - (i) OP_DUP, OP_HASH160, and all other opcodes; - (ii) DATA_: data with the specified length; - (iii) NUMBER: a 4-byte integer; - (iv) BLOCK: a variable length block, to be parsed later - - Example: - >>> r = re_compile( - ... '^(?:DATA_4 OP_GREATERTHAN_TIMESTAMP)? ' - ... 'OP_DUP OP_HASH160 (DATA_20) OP_EQUALVERIFY OP_CHECKSIG$' - ... ) - - :return: A compiled regular expression matcher - :rtype: :py:class:`re.Pattern` - """ - - def _to_byte_pattern(m): - x = m.group().decode('ascii').strip() - if x.startswith('OP_'): - return bytes([Opcode[x]]) - elif x.startswith('DATA_'): - length = int(m.group()[5:]) - return _re_pushdata(length) - elif x.startswith('NUMBER'): - return b'.{5}' - elif x.startswith('BLOCK'): - return b'.*' - else: - raise ValueError('Invalid opcode: {}'.format(x)) - - p = pattern.encode('ascii') - p = re.sub(rb'\s*([A-Z0-9_]+)\s*', _to_byte_pattern, p) - return re.compile(p, re.DOTALL) - - -def _re_pushdata(length: int) -> bytes: - """ Create a regular expression that matches a data block with a given length. - - :return: A non-compiled regular expression - :rtype: bytes - """ - ret = [bytes([Opcode.OP_PUSHDATA1]), bytes([length]), b'.{', str(length).encode('ascii'), b'}'] - - if length <= 75: - # for now, we accept <= 75 bytes with OP_PUSHDATA1. 
It's optional - ret.insert(1, b'?') - - return b''.join(ret) - - -class Opcode(IntEnum): - OP_0 = 0x50 - OP_1 = 0x51 - OP_2 = 0x52 - OP_3 = 0x53 - OP_4 = 0x54 - OP_5 = 0x55 - OP_6 = 0x56 - OP_7 = 0x57 - OP_8 = 0x58 - OP_9 = 0x59 - OP_10 = 0x5a - OP_11 = 0x5b - OP_12 = 0x5c - OP_13 = 0x5d - OP_14 = 0x5e - OP_15 = 0x5f - OP_16 = 0x60 - OP_DUP = 0x76 - OP_EQUAL = 0x87 - OP_EQUALVERIFY = 0x88 - OP_CHECKSIG = 0xAC - OP_HASH160 = 0xA9 - OP_PUSHDATA1 = 0x4C - OP_GREATERTHAN_TIMESTAMP = 0x6F - OP_CHECKMULTISIG = 0xAE - OP_CHECKDATASIG = 0xBA - OP_DATA_STREQUAL = 0xC0 - OP_DATA_GREATERTHAN = 0xC1 - OP_FIND_P2PKH = 0xD0 - OP_DATA_MATCH_VALUE = 0xD1 - - @classmethod - def is_pushdata(cls, opcode: int) -> bool: - """ Check if `opcode` represents an operation of pushing data on stack - """ - if 1 <= opcode <= 75: - # case: push [1,75] bytes on stack (op_pushdata) - return True - elif cls.OP_0 <= opcode <= cls.OP_16: - # case: push integer on stack (op_integer) - return True - elif opcode == cls.OP_PUSHDATA1: - # case: op_pushdata1 - return True - # ...Any other case - return False - - @classmethod - def is_valid_opcode(cls, opcode: int) -> bool: - """ Check if `opcode` is valid - - check for pushdata first to validate unconventional opcodes for data - - check for conventional opcode - """ - if cls.is_pushdata(opcode): - return True - try: - cls(opcode) - except ValueError: - return False - else: - return True - - -class HathorScript: - """This class is supposed to be help build scripts abstracting some corner cases. - - For example, when pushing data to the stack, we may or may not have to use OP_PUSHDATA. - This is the sequence we have to add to the script: - - len(data) <= 75: [len(data) data] - - len(data) > 75: [OP_PUSHDATA1 len(data) data] - - pushData abstracts this differences and presents an unique interface. 
- """ - def __init__(self) -> None: - self.data = b'' - - def addOpcode(self, opcode: Opcode) -> None: - self.data += bytes([opcode]) - - def pushData(self, data: Union[int, bytes]) -> None: - if isinstance(data, int): - if data > 4294967295: - n = struct.pack('!Q', data) - elif data > 65535: - n = struct.pack('!I', data) - elif data > 255: - n = struct.pack('!H', data) - else: - n = struct.pack('!B', data) - data = n - if len(data) <= 75: - self.data += (bytes([len(data)]) + data) - else: - self.data += (bytes([Opcode.OP_PUSHDATA1]) + bytes([len(data)]) + data) - - -class BaseScript(ABC): - """ - This class holds common methods for different script types to help abstracting the script type. - """ - - @abstractmethod - def to_human_readable(self) -> dict[str, Any]: - """Return a nice dict for using on informational json APIs.""" - raise NotImplementedError - - @abstractmethod - def get_type(self) -> str: - """Get script type name""" - raise NotImplementedError - - @abstractmethod - def get_script(self) -> bytes: - """Get or build script""" - raise NotImplementedError - - @abstractmethod - def get_address(self) -> Optional[str]: - """Get address for this script, not all valid recognizable scripts have addresses.""" - raise NotImplementedError - - @abstractmethod - def get_timelock(self) -> Optional[int]: - """Get timelock for this script, completely optional.""" - raise NotImplementedError - - -class P2PKH(BaseScript): - re_match = re_compile('^(?:(DATA_4) OP_GREATERTHAN_TIMESTAMP)? ' - 'OP_DUP OP_HASH160 (DATA_20) OP_EQUALVERIFY OP_CHECKSIG$') - - def __init__(self, address: str, timelock: Optional[int] = None) -> None: - """This class represents the pay to public hash key script. It enables the person - who has the corresponding private key of the address to spend the tokens. - - This script validates the signature and public key on the corresponding input - data. 
The public key is first checked against the script address and then the - signature is verified, which means the sender owns the corresponding private key. - - Output script and the corresponding input data are usually represented like: - input data: OP_DUP OP_HASH160 OP_EQUALVERIFY OP_CHECKSIG - output script: - - :param address: address to send tokens - :type address: string(base58) - - :param timelock: timestamp until when it's locked - :type timelock: int - """ - self.address = address - self.timelock = timelock - - def to_human_readable(self) -> dict[str, Any]: - ret: dict[str, Any] = {} - ret['type'] = self.get_type() - ret['address'] = self.address - ret['timelock'] = self.timelock - return ret - - def get_type(self) -> str: - return 'P2PKH' - - def get_script(self) -> bytes: - return P2PKH.create_output_script(decode_address(self.address), self.timelock) - - def get_address(self) -> Optional[str]: - return self.address - - def get_timelock(self) -> Optional[int]: - return self.timelock - - @classmethod - def create_output_script(cls, address: bytes, timelock: Optional[Any] = None) -> bytes: - """ - :param address: address to send tokens - :type address: bytes - - :param timelock: timestamp until when the output is locked - :type timelock: bytes - - :rtype: bytes - """ - assert len(address) == 25 - public_key_hash = address[1:-4] - s = HathorScript() - if timelock: - s.pushData(timelock) - s.addOpcode(Opcode.OP_GREATERTHAN_TIMESTAMP) - s.addOpcode(Opcode.OP_DUP) - s.addOpcode(Opcode.OP_HASH160) - s.pushData(public_key_hash) - s.addOpcode(Opcode.OP_EQUALVERIFY) - s.addOpcode(Opcode.OP_CHECKSIG) - return s.data - - @classmethod - def create_input_data(cls, public_key_bytes: bytes, signature: bytes) -> bytes: - """ - :param private_key: key corresponding to the address we want to spend tokens from - :type private_key: :py:class:`cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey` - - :rtype: bytes - """ - s = HathorScript() - 
s.pushData(signature) - s.pushData(public_key_bytes) - return s.data - - @classmethod - def parse_script(cls, script: bytes) -> Optional['P2PKH']: - """Checks if the given script is of type p2pkh. If it is, returns the P2PKH object. - Otherwise, returns None. - - :param script: script to check - :type script: bytes - - :rtype: :py:class:`hathor.transaction.scripts.P2PKH` or None - """ - match = cls.re_match.search(script) - if match: - groups = match.groups() - timelock = None - pushdata_timelock = groups[0] - if pushdata_timelock: - timelock_bytes = pushdata_timelock[1:] - timelock = struct.unpack('!I', timelock_bytes)[0] - pushdata_address = groups[1] - public_key_hash = get_pushdata(pushdata_address) - address_b58 = get_address_b58_from_public_key_hash(public_key_hash) - return cls(address_b58, timelock) - return None - - -class MultiSig(BaseScript): - re_match = re_compile('^(?:(DATA_4) OP_GREATERTHAN_TIMESTAMP)? ' 'OP_HASH160 (DATA_20) OP_EQUAL$') - - def __init__(self, address: str, timelock: Optional[Any] = None) -> None: - """This class represents the multi signature script (MultiSig). It enables the group of persons - who has the corresponding private keys of the address to spend the tokens. - - This script validates the signatures and public keys on the corresponding input - data. - - Output script and the corresponding input data are usually represented like: - output script: OP_HASH160 OP_EQUAL - input data: ... 
- - :param address: address to send tokens - :type address: string(base58) - - :param timelock: timestamp until when it's locked - :type timelock: int - """ - self.address = address - self.timelock = timelock - - def to_human_readable(self) -> dict[str, Any]: - """ Decode MultiSig class to dict with its type and data - - :return: dict with MultiSig info - :rtype: dict[str:] - """ - ret: dict[str, Any] = {} - ret['type'] = self.get_type() - ret['address'] = self.address - ret['timelock'] = self.timelock - return ret - - def get_type(self) -> str: - return 'MultiSig' - - def get_script(self) -> bytes: - return MultiSig.create_output_script(decode_address(self.address), self.timelock) - - def get_address(self) -> Optional[str]: - return self.address - - def get_timelock(self) -> Optional[int]: - return self.timelock - - @classmethod - def get_multisig_redeem_script_pos(cls, input_data: bytes) -> int: - """ Get the position of the opcode that pushed the redeem_script on the stack - - :param input_data: data from the input being evaluated - :type input_data: bytes - - :return: position of pushdata for redeem_script - :rtype: int - """ - pos = 0 - last_pos = 0 - data_len = len(input_data) - while pos < data_len: - last_pos = pos - _, pos = get_script_op(pos, input_data) - return last_pos - - @classmethod - def create_output_script(cls, address: bytes, timelock: Optional[Any] = None) -> bytes: - """ - :param address: address to send tokens - :type address: bytes - - :param timelock: timestamp until when the output is locked - :type timelock: bytes - - :rtype: bytes - """ - assert len(address) == 25 - redeem_script_hash = address[1:-4] - s = HathorScript() - if timelock: - s.pushData(timelock) - s.addOpcode(Opcode.OP_GREATERTHAN_TIMESTAMP) - s.addOpcode(Opcode.OP_HASH160) - s.pushData(redeem_script_hash) - s.addOpcode(Opcode.OP_EQUAL) - return s.data - - @classmethod - def create_input_data(cls, redeem_script: bytes, signatures: list[bytes]) -> bytes: - """ - :param 
redeem_script: script to redeem the tokens: ... - :type redeem_script: bytes - - :param signatures: array of signatures to validate the input and redeem the tokens - :type signagures: list[bytes] - - :rtype: bytes - """ - s = HathorScript() - for signature in signatures: - s.pushData(signature) - s.pushData(redeem_script) - return s.data - - @classmethod - def parse_script(cls, script: bytes) -> Optional['MultiSig']: - """Checks if the given script is of type multisig. If it is, returns the MultiSig object. - Otherwise, returns None. - - :param script: script to check - :type script: bytes - - :rtype: :py:class:`hathor.transaction.scripts.MultiSig` or None - """ - match = cls.re_match.search(script) - if match: - groups = match.groups() - timelock = None - pushdata_timelock = groups[0] - if pushdata_timelock: - timelock_bytes = pushdata_timelock[1:] - timelock = struct.unpack('!I', timelock_bytes)[0] - redeem_script_hash = get_pushdata(groups[1]) - address_b58 = get_address_b58_from_redeem_script_hash(redeem_script_hash) - return cls(address_b58, timelock) - return None - - @classmethod - def get_multisig_data(cls, input_data: bytes) -> bytes: - """ Input data has many signatures and a block with the redeem script - In the second part of the script eval we need to evaluate the redeem script - so we need to get the redeem script without the block, to evaluate the elements on it - - This method removes the (possible) OP_PUSHDATA1 byte and the redeem script length, - so it can be evaluated as any normal script - - :param input_data: data from the input being evaluated - :type input_data: bytes - - :return: data ready to be evaluated. 
The signatures and the redeem script - :rtype: bytes - """ - pos = 0 - last_pos = 0 - stack: Stack = [] - data_len = len(input_data) - while pos < data_len: - last_pos = pos - opcode = input_data[pos] - if (opcode >= 1 and opcode <= 75): - pos = op_pushdata(pos, input_data, stack) - elif opcode == Opcode.OP_PUSHDATA1: - pos = op_pushdata1(pos, input_data, stack) - else: - pos += 1 - - redeem_script = stack[-1] - assert isinstance(redeem_script, bytes) - return input_data[:last_pos] + redeem_script - - -# XXX: does it make sense to make this BaseScript too? -class NanoContractMatchValues: - re_match = re_compile('^OP_DUP OP_HASH160 (DATA_20) OP_EQUALVERIFY OP_CHECKDATASIG OP_0 (BLOCK) OP_DATA_STREQUAL ' - 'OP_1 (NUMBER) OP_DATA_GREATERTHAN OP_2 (BLOCK) OP_DATA_MATCH_VALUE OP_FIND_P2PKH$') - - def __init__(self, oracle_pubkey_hash, min_timestamp, oracle_data_id, value_dict, fallback_pubkey_hash=b'\x00'): - """This class represents a nano contract that tries to match on a single value. The pubKeyHash - associated with the data given by the oracle will be able to spend the contract tokens. - - :param oracle_pubkey_hash: oracle's public key after being hashed by SHA256 and RIPMD160 - :type oracle_pubkey_hash: bytes - - :param min_timestamp: contract can only be spent after this timestamp. If we don't need it, simply - pass same timestamp as transaction - :type min_timestamp: int - - :param oracle_data_id: unique id for the data reported by the oracle. For eg, a oracle that reports - stock prices can use stock ticker symbols as this id - :type oracle_data_id: bytes - - :param value_dict: a dictionary with the pubKeyHash and corresponding value ({pubKeyHash, value}). 
- The pubkeyHash with value matching the data sent by oracle will be able to spend the contract funds - :type value_dict: dict[bytes, int] - - :param fallback_pubkey_hash: if none of the values match, this pubkey hash identifies the winner address - :type fallback_pubkey_hash: bytes - """ - self.oracle_pubkey_hash = oracle_pubkey_hash - self.min_timestamp = min_timestamp - self.oracle_data_id = oracle_data_id - self.value_dict = value_dict # dict[bytes, int] - self.fallback_pubkey_hash = fallback_pubkey_hash - - def to_human_readable(self) -> dict[str, Any]: - ret: dict[str, Any] = {} - ret['type'] = 'NanoContractMatchValues' - ret['oracle_pubkey_hash'] = base64.b64encode(self.oracle_pubkey_hash).decode('utf-8') - ret['min_timestamp'] = self.min_timestamp - ret['oracle_data_id'] = self.oracle_data_id.decode('utf-8') - ret['value_dict'] = {get_address_b58_from_bytes(k): v for k, v in self.value_dict.items()} - try: - if len(self.fallback_pubkey_hash) == 1: - ret['fallback_pubkey_hash'] = None - else: - ret['fallback_pubkey_hash'] = get_address_b58_from_bytes(self.fallback_pubkey_hash) - except TypeError: - ret['fallback_pubkey_hash'] = None - return ret - - def create_output_script(self) -> bytes: - """ - :return: the output script in binary - :rtype: bytes - """ - s = HathorScript() - s.addOpcode(Opcode.OP_DUP) - s.addOpcode(Opcode.OP_HASH160) - s.pushData(self.oracle_pubkey_hash) - s.addOpcode(Opcode.OP_EQUALVERIFY) - s.addOpcode(Opcode.OP_CHECKDATASIG) - # compare first value from data with oracle_data_id - s.addOpcode(Opcode.OP_0) - s.pushData(self.oracle_data_id) - s.addOpcode(Opcode.OP_DATA_STREQUAL) - # compare second value from data with min_timestamp - s.addOpcode(Opcode.OP_1) - s.pushData(struct.pack('!I', self.min_timestamp)) - s.addOpcode(Opcode.OP_DATA_GREATERTHAN) - # finally, compare third value with values on dict - s.addOpcode(Opcode.OP_2) - s.pushData(self.fallback_pubkey_hash) - for pubkey_hash, value in self.value_dict.items(): - 
s.pushData(value) - s.pushData(pubkey_hash) - # we use int as bytes because it may be greater than 16 - # TODO should we limit it to 16? - s.pushData(len(self.value_dict)) - s.addOpcode(Opcode.OP_DATA_MATCH_VALUE) - # pubkey left on stack should be on outputs - s.addOpcode(Opcode.OP_FIND_P2PKH) - return s.data - - @classmethod - def create_input_data(cls, data: bytes, oracle_sig: bytes, oracle_pubkey: bytes) -> bytes: - """ - :param data: data from the oracle - :type data: bytes - - :param oracle_sig: the data signed by the oracle, with its private key - :type oracle_sig: bytes - - :param oracle_pubkey: the oracle's public key - :type oracle_pubkey: bytes - - :rtype: bytes - """ - s = HathorScript() - s.pushData(data) - s.pushData(oracle_sig) - s.pushData(oracle_pubkey) - return s.data - - @classmethod - def parse_script(cls, script: bytes) -> Optional['NanoContractMatchValues']: - """Checks if the given script is of type NanoContractMatchValues. If it is, returns the corresponding object. - Otherwise, returns None. - - :param script: script to check - :type script: bytes - - :rtype: :py:class:`hathor.transaction.scripts.NanoContractMatchValues` or None - """ - # regex for this is a bit tricky, as some data has variable length. We first match the base regex for this - # script and later manually parse variable length fields - match = cls.re_match.search(script) - if match: - groups = match.groups() - # oracle pubkey hash - oracle_pubkey_hash = get_pushdata(groups[0]) - # oracle data id - oracle_data_id = get_pushdata(groups[1]) - # timestamp - timestamp = groups[2] - min_timestamp = binary_to_int(timestamp[1:]) - - # variable length data. We'll parse it manually. 
It should have the following format: - # fallback_pubkey_hash, [valueN, pubkey_hash_N], N - extra_data = groups[3] - - fallback_pubkey_len = extra_data[0] - if len(extra_data) < fallback_pubkey_len + 2: - # extra data has at least the fallback_pubkey length (1 byte) and number of - # values (N, after values and pubkeys). That's why we use fallback_pubkey_len + 2 - return None - fallback_pubkey = extra_data[1] if fallback_pubkey_len == 1 else extra_data[1:fallback_pubkey_len] - n_values = extra_data[-1] - - values_pubkeys = extra_data[(fallback_pubkey_len + 1):-2] - value_dict = {} - pos = 0 - for i in range(n_values): - if len(values_pubkeys[pos:]) < 1: - return None - value_len = values_pubkeys[pos] - pos += 1 - if len(values_pubkeys[pos:]) < value_len: - return None - value = values_pubkeys[pos] if value_len == 1 else binary_to_int(values_pubkeys[pos:(pos + value_len)]) - pos += value_len - if len(values_pubkeys[pos:]) < 1: - return None - pubkey_len = values_pubkeys[pos] - pos += 1 - if len(values_pubkeys[pos:]) < pubkey_len: - return None - pubkey = values_pubkeys[pos:(pos + pubkey_len)] - pos += pubkey_len - value_dict[pubkey] = value - - if len(values_pubkeys[pos:]) > 0: - # shouldn't have data left - return None - - return NanoContractMatchValues(oracle_pubkey_hash, min_timestamp, oracle_data_id, value_dict, - fallback_pubkey) - return None - - -def create_base_script(address: str, timelock: Optional[Any] = None) -> BaseScript: - """ Verifies if address is P2PKH or Multisig and return the corresponding BaseScript implementation. 
- """ - settings = get_settings() - baddress = decode_address(address) - if baddress[0] == binary_to_int(settings.P2PKH_VERSION_BYTE): - return P2PKH(address, timelock) - elif baddress[0] == binary_to_int(settings.MULTISIG_VERSION_BYTE): - return MultiSig(address, timelock) - else: - raise ScriptError('The address is not valid') - - -def create_output_script(address: bytes, timelock: Optional[Any] = None) -> bytes: - """ Verifies if address is P2PKH or Multisig and create correct output script - - :param address: address to send tokens - :type address: bytes - - :param timelock: timestamp until when the output is locked - :type timelock: bytes - - :raises ScriptError: if address is not from one of the possible options - - :rtype: bytes - """ - settings = get_settings() - # XXX: if the address class can somehow be simplified create_base_script could be used here - if address[0] == binary_to_int(settings.P2PKH_VERSION_BYTE): - return P2PKH.create_output_script(address, timelock) - elif address[0] == binary_to_int(settings.MULTISIG_VERSION_BYTE): - return MultiSig.create_output_script(address, timelock) - else: - raise ScriptError('The address is not valid') - - -def parse_address_script(script: bytes) -> Optional[Union[P2PKH, MultiSig]]: - """ Verifies if address is P2PKH or Multisig and calls correct parse_script method - - :param script: script to decode - :type script: bytes - - :return: P2PKH or MultiSig class or None - :rtype: class or None - """ - script_classes: list[type[Union[P2PKH, MultiSig]]] = [P2PKH, MultiSig] - # Each class verifies its script - for script_class in script_classes: - if script_class.re_match.search(script): - return script_class.parse_script(script) - return None - - -def decode_opn(opcode: int) -> int: - """ Decode integer opcode (OP_N) to its integer value - - :param opcode: the opcode to convert - :type opcode: bytes - - :raises InvalidScriptError: case opcode is not a valid OP_N - - :return: int value for opcode param - :rtype: int - 
""" - int_val = opcode - Opcode.OP_0 - if not (0 <= int_val <= 16): - raise InvalidScriptError('unknown opcode {}'.format(opcode)) - return int_val - - -def get_data_bytes(position: int, length: int, data: bytes) -> bytes: - """ Extract `length` bytes from `data` starting at `position` - - :param position: start position of bytes string to extract - :type position: int - - :param length: len of bytes str to extract - :type length: int - - :param data: script containing data to extract - :type data: bytes - - :raises OutOfData: when trying to read out of script - - :return: bytes string of extracted data - :rtype: bytes - """ - if not (0 < length <= len(data)): - raise OutOfData("length ({}) should be from 0 up to data length".format(length)) - if not (0 < position < len(data)): - raise OutOfData("position should be inside data") - if (position+length) > len(data): - raise OutOfData('trying to read {} bytes starting at {}, available {}'.format(length, position, len(data))) - return data[position:position+length] - - -def get_data_single_byte(position: int, data: bytes) -> int: - """ Extract 1 byte from `data` at `position` - - :param position: position of byte to extract - :type position: int - - :param data: script containing data to extract - :type data: bytes - - :raises OutOfData: when trying to read out of script - - :return: extracted byte - :rtype: int - """ - if not (0 <= position < len(data)): - raise OutOfData("trying to read a byte at {} outside of data, available {}".format(position, len(data))) - return data[position] - - -def get_script_op(pos: int, data: bytes, stack: Optional[Stack] = None) -> OpcodePosition: - """ Interpret opcode at `pos` and return the opcode and the position of the next opcode - if opcode is a pushdata, push extracted data to stack if there is a stack - - :param pos: position of opcode to read - :type pos: int - - :param data: script to be evaluated that contains data and opcodes - :type data: bytes - - :param stack: stack to put 
class _ScriptOperation(NamedTuple):
    # One decoded script operation: the opcode, where it started, and any pushed data.
    opcode: Union[Opcode, int]
    position: int
    data: Union[None, bytes, int, str]


def parse_script_ops(data: bytes) -> Generator[_ScriptOperation, None, None]:
    """Parse a script, yielding each operation found.

    Utility to make scripts human readable for debugging and development.

    :param data: script to parse that contains data and opcodes
    :return: generator of operations in the script
    """
    stack: Stack = []
    pos = 0
    end = len(data)
    while pos < end:
        start = pos
        raw_opcode, pos = get_script_op(pos, data, stack)
        op: Union[Opcode, int]
        try:
            op = Opcode(raw_opcode)
        except ValueError:
            # not a named opcode: keep the raw int
            op = raw_opcode
        pushed = stack.pop() if stack else None
        yield _ScriptOperation(opcode=op, position=start, data=pushed)
def count_sigops(data: bytes) -> int:
    """Count the signature operations in a script.

    :param data: script to parse that contains data and opcodes
    :raises OutOfData: when trying to read out of the script
    :raises InvalidScriptError: when an invalid opcode is found, or when the opcode
        preceding an OP_CHECKMULTISIG is not an integer (the pubkey count)
    :return: number of signature operations the script would do if executed
    """
    settings = get_settings()
    total: int = 0
    pos: int = 0
    end: int = len(data)
    previous: Union[int, None] = None

    while pos < end:
        opcode, pos = get_script_op(pos, data)
        if opcode == Opcode.OP_CHECKSIG:
            total += 1
        elif opcode == Opcode.OP_CHECKMULTISIG:
            assert isinstance(previous, int)
            if Opcode.OP_0 <= previous <= Opcode.OP_16:
                # Conventional form: the previous opcode is the pubkey count (n).
                # op_checksig runs with each (signature, pubkey) pair until all
                # signatures verify; worst case is n checks, best case m (m <= n),
                # so count n — the upper limit.
                total += decode_opn(previous)
            else:
                # Unconventional form: count the pubkey limit, which is also the
                # upper limit on signature operations any op_checkmultisig can run.
                total += settings.MAX_MULTISIG_PUBKEYS
        previous = opcode
    return total
def get_sigops_count(data: bytes, output_script: Optional[bytes] = None) -> int:
    """Count the signature operations in a script.

    When `data` is an input script and the spent output script is given, a MultiSig
    output makes the count run over input data + redeem script instead.

    :param data: script to parse with opcodes
    :param output_script: spent output script when `data` comes from a TxIn
    :raises OutOfData: when trying to read out of the script
    :raises InvalidScriptError: when an invalid opcode is found
    :return: number of signature operations the script would do if executed
    """
    if output_script is not None and MultiSig.re_match.search(output_script):
        # spending a MultiSig output: the redeem script's sigops must be counted
        return count_sigops(MultiSig.get_multisig_data(data))
    return count_sigops(data)


def execute_eval(data: bytes, log: list[str], extras: ScriptExtras) -> None:
    """Evaluate a script, executing each opcode against a fresh stack.

    :param data: data to be evaluated that contains data and opcodes
    :param log: list of log messages
    :param extras: namedtuple with extra fields (tx, txin, spent_tx)
    :raises ScriptError: when an opcode has no implementation
    :raises FinalStackInvalid: when the evaluation fails
    """
    stack: Stack = []
    pos = 0
    end = len(data)
    while pos < end:
        opcode, pos = get_script_op(pos, data, stack)
        if Opcode.is_pushdata(opcode):
            # the data was already pushed by get_script_op
            continue
        fn = MAP_OPCODE_TO_FN.get(opcode)
        if fn is None:
            raise ScriptError('unknown opcode')
        fn(stack, log, extras)

    evaluate_final_stack(stack, log)
def evaluate_final_stack(stack: Stack, log: list[str]) -> None:
    """Check the final state of the stack.

    The evaluation is valid only when exactly one value is left and it is 1 (true).

    :raises FinalStackInvalid: on an empty stack, extra values, or a non-true value
    """
    if not stack:
        log.append('Empty Stack left')
        raise FinalStackInvalid('\n'.join(log))
    if len(stack) > 1:
        log.append('Stack left with more than one value')
        raise FinalStackInvalid('\n'.join(log))
    if stack.pop() != 1:
        log.append('Stack left with False value')
        raise FinalStackInvalid('\n'.join(log))


def script_eval(tx: Transaction, txin: TxInput, spent_tx: BaseTransaction) -> None:
    """Evaluate the output script and input data according to a very limited
    subset of Bitcoin's scripting language.

    :param tx: the transaction being validated, the 'owner' of the input data
    :param txin: transaction input being evaluated
    :param spent_tx: the transaction referenced by the input
    :raises ScriptError: if script verification fails
    """
    input_data = txin.data
    output_script = spent_tx.outputs[txin.index].script
    log: list[str] = []
    extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx)

    if not MultiSig.re_match.search(output_script):
        # plain case: evaluate input data followed by the output script
        execute_eval(input_data + output_script, log, extras)
        return

    # MultiSig needs two evaluations:
    # 1) the redeem script must match the redeem script hash. We can't run
    #    input_data + output_script directly because the signatures would be
    #    left on the stack after the output script executes.
    redeem_script_pos = MultiSig.get_multisig_redeem_script_pos(input_data)
    execute_eval(txin.data[redeem_script_pos:] + output_script, log, extras)

    # 2) the signatures in the input data must solve the redeem script:
    #    pop the redeem script, append it to the signatures and execute.
    execute_eval(MultiSig.get_multisig_data(extras.txin.data), log, extras)
def get_pushdata(data: bytes) -> bytes:
    """Return the bytes pushed at the start of `data`.

    Handles both a direct push (first byte is the length, <= 75) and
    OP_PUSHDATA1 (first byte > 75, second byte is the length).
    """
    if data[0] > 75:
        # OP_PUSHDATA1-style: length lives in the second byte
        start, length = 2, data[1]
    else:
        start, length = 1, data[0]
    return data[start:start + length]


def get_data_value(k: int, data: bytes) -> bytes:
    """Extract the kth value from `data`.

    `data` is conceptually value0:value1:...:valueN — in practice a byte
    sequence where each value is preceded by a single length byte.

    # TODO allow values larger than 255 bytes (some logic similar to OP_PUSHDATA1?)

    :param k: index of the value to retrieve
    :param data: data to get the value from
    :raises OutOfData: if the length to read is larger than what's available
    :raises DataIndexError: if index `k` is not available in `data`
    """
    end = len(data)
    pos = 0
    index = 0
    while pos < end:
        length = data[pos]
        if length == 0:
            # TODO throw error
            pass
        pos += 1
        if pos + length > end:
            raise OutOfData('trying to read {} bytes starting at {}, available {}'.format(length, pos, end))
        if index == k:
            return data[pos:pos + length]
        index += 1
        pos += length
    raise DataIndexError
def binary_to_int(binary: bytes) -> int:
    """Convert a big-endian unsigned binary value of size 1, 2, 4, or 8 to int.

    :param binary: value to convert
    :raises struct.error: for any other input length
    """
    fmt = {1: '!B', 2: '!H', 4: '!I', 8: '!Q'}.get(len(binary))
    if fmt is None:
        raise struct.error
    (value,) = struct.unpack(fmt, binary)
    return value


def op_pushdata(position: int, full_data: bytes, stack: Stack) -> int:
    """Push up to 75 bytes onto the stack (direct-length pushdata).

    :param position: current position being read from `full_data`
    :param full_data: input data + output script combined
    :param stack: the stack used when evaluating the script
    :raises OutOfData: if the data to read is larger than what's available
    :return: new position to be read from `full_data`
    """
    length, new_pos = get_script_op(position, full_data, stack)
    assert length <= 75
    return new_pos


def op_pushdata1(position: int, full_data: bytes, stack: Stack) -> int:
    """Push data onto the stack; the byte after the opcode holds the length.

    :param position: current position being read from `full_data`
    :param full_data: input data + output script combined
    :param stack: the stack used when evaluating the script
    :raises OutOfData: if the data to read is larger than what's available
    :return: new position to be read from `full_data`
    """
    opcode, new_pos = get_script_op(position, full_data, stack)
    assert opcode == Opcode.OP_PUSHDATA1
    return new_pos
def op_dup(stack: Stack, log: list[str], extras: ScriptExtras) -> None:
    """Duplicate the item on top of the stack.

    :raises MissingStackItems: if the stack is empty
    """
    if not stack:
        raise MissingStackItems('OP_DUP: empty stack')
    stack.append(stack[-1])


def op_greaterthan_timestamp(stack: Stack, log: list[str], extras: ScriptExtras) -> None:
    """Fail unless the transaction's timestamp is greater than the top of the stack.

    The top of the stack must be a big-endian u32int.

    :raises MissingStackItems: if the stack is empty
    :raises TimeLocked: if the output is still time-locked
    """
    if not stack:
        raise MissingStackItems('OP_GREATERTHAN_TIMESTAMP: empty stack')
    raw = stack.pop()
    assert isinstance(raw, bytes)
    (timelock,) = struct.unpack('!I', raw)
    if extras.tx.timestamp <= timelock:
        raise TimeLocked('The output is locked until {}'.format(
            datetime.datetime.fromtimestamp(timelock).strftime("%m/%d/%Y %I:%M:%S %p")))
def op_equalverify(stack: Stack, log: list[str], extras: ScriptExtras) -> None:
    """Verify that the top 2 stack elements are equal, consuming them.

    :raises MissingStackItems: if there aren't 2 elements on the stack
    :raises EqualVerifyFailed: if the elements don't match
    """
    if len(stack) < 2:
        raise MissingStackItems('OP_EQUALVERIFY: need 2 elements on stack, currently {}'.format(len(stack)))
    op_equal(stack, log, extras)
    if not stack.pop():
        raise EqualVerifyFailed('Failed to verify if elements are equal')


def op_equal(stack: Stack, log: list[str], extras: ScriptExtras) -> None:
    """Compare the top 2 stack elements; push 1 when equal, 0 otherwise.

    :raises MissingStackItems: if there aren't 2 elements on the stack
    """
    if len(stack) < 2:
        raise MissingStackItems('OP_EQUAL: need 2 elements on stack, currently {}'.format(len(stack)))
    first = stack.pop()
    second = stack.pop()
    assert isinstance(first, bytes)
    assert isinstance(second, bytes)
    if first == second:
        stack.append(1)
    else:
        stack.append(0)
        log.append('OP_EQUAL: failed. elements: {} {}'.format(first.hex(), second.hex()))


def op_checksig(stack: Stack, log: list[str], extras: ScriptExtras) -> None:
    """Verify that the public key and signature on the stack match.

    Expects the public key on top of the stack, then the signature. Pushes 1
    (true) when they match, 0 (false) otherwise.

    :raises MissingStackItems: if there aren't 2 elements on the stack
    :raises ScriptError: if the pubkey on the stack is not a compressed public key
    """
    if len(stack) < 2:
        raise MissingStackItems('OP_CHECKSIG: need 2 elements on stack, currently {}'.format(len(stack)))
    pubkey = stack.pop()
    signature = stack.pop()
    assert isinstance(pubkey, bytes)
    assert isinstance(signature, bytes)

    if not is_pubkey_compressed(pubkey):
        raise ScriptError('OP_CHECKSIG: pubkey is not a compressed public key')
    try:
        public_key = get_public_key_from_bytes_compressed(pubkey)
    except ValueError as e:
        # bytes are not a valid compressed public key
        raise ScriptError('OP_CHECKSIG: pubkey is not a public key') from e
    try:
        public_key.verify(signature, extras.tx.get_sighash_all_data(), ec.ECDSA(hashes.SHA256()))
    except InvalidSignature:
        # invalid signature: push false
        stack.append(0)
        log.append('OP_CHECKSIG: failed')
    else:
        # valid signature: push true
        stack.append(1)
def op_hash160(stack: Stack, log: list[str], extras: ScriptExtras) -> None:
    """Hash the top stack item (SHA-256 then RIPEMD-160) and push the result.

    :raises MissingStackItems: if the stack is empty
    """
    if not stack:
        raise MissingStackItems('OP_HASH160: empty stack')
    payload = stack.pop()
    assert isinstance(payload, bytes)
    stack.append(get_hash160(payload))


def op_checkdatasig(stack: Stack, log: list[str], extras: ScriptExtras) -> None:
    """Verify that the public key, signature and data on the stack match.

    Expects the public key on top of the stack, then the signature, then the
    data. On success the data is pushed back; otherwise evaluation fails.

    :raises MissingStackItems: if there aren't 3 elements on the stack
    :raises ScriptError: if the pubkey on the stack is not a compressed public key
    :raises OracleChecksigFailed: invalid signature for the given data and pubkey
    """
    if len(stack) < 3:
        raise MissingStackItems('OP_CHECKDATASIG: need 3 elements on stack, currently {}'.format(len(stack)))
    pubkey = stack.pop()
    signature = stack.pop()
    data = stack.pop()
    assert isinstance(pubkey, bytes)
    assert isinstance(signature, bytes)
    assert isinstance(data, bytes)

    if not is_pubkey_compressed(pubkey):
        raise ScriptError('OP_CHECKDATASIG: pubkey is not a compressed public key')
    try:
        public_key = get_public_key_from_bytes_compressed(pubkey)
    except ValueError as e:
        # bytes are not a valid compressed public key
        raise ScriptError('OP_CHECKDATASIG: pubkey is not a public key') from e
    try:
        public_key.verify(signature, data, ec.ECDSA(hashes.SHA256()))
    except InvalidSignature as e:
        raise OracleChecksigFailed from e
    # valid: push the verified data back
    stack.append(data)
def op_data_strequal(stack: Stack, log: list[str], extras: ScriptExtras) -> None:
    """Equivalent to an OP_GET_DATA_STR followed by an OP_EQUALVERIFY.

    Consumes three stack items: value, k, data. Gets the kth value from data
    as a string and verifies it equals value; on success pushes data back.

    :raises MissingStackItems: if there aren't 3 elements on the stack
    :raises VerifyFailed: verification failed
    """
    if len(stack) < 3:
        raise MissingStackItems('OP_DATA_STREQUAL: need 3 elements on stack, currently {}'.format(len(stack)))
    expected = stack.pop()
    data_k = stack.pop()
    data = stack.pop()
    assert isinstance(expected, bytes)
    assert isinstance(data, bytes)

    if not isinstance(data_k, int):
        raise VerifyFailed('OP_DATA_STREQUAL: value on stack should be an integer ({!r})'.format(data_k))

    actual = get_data_value(data_k, data)
    if actual != expected:
        raise VerifyFailed('OP_DATA_STREQUAL: {} x {}'.format(actual.decode('utf-8'), expected.decode('utf-8')))

    stack.append(data)
def op_data_greaterthan(stack: Stack, log: list[str], extras: ScriptExtras) -> None:
    """Equivalent to an OP_GET_DATA_INT followed by an OP_GREATERTHAN.

    Consumes three stack items: value, k, data. Gets the kth value from data
    as an integer and verifies it's greater than value; on success pushes
    data back.

    :param stack: the stack used when evaluating the script
    :raises MissingStackItems: if there aren't 3 elements on the stack
    :raises VerifyFailed: verification failed
    """
    if len(stack) < 3:
        raise MissingStackItems('OP_DATA_GREATERTHAN: need 3 elements on stack, currently {}'.format(len(stack)))
    value = stack.pop()
    data_k = stack.pop()
    data = stack.pop()
    assert isinstance(value, bytes)
    assert isinstance(data, bytes)

    if not isinstance(data_k, int):
        # fix: this error message used to say 'OP_DATA_STREQUAL', a copy-paste
        # leftover from op_data_strequal; it misidentified the failing opcode
        raise VerifyFailed('OP_DATA_GREATERTHAN: value on stack should be an integer ({!r})'.format(data_k))

    data_value = get_data_value(data_k, data)
    try:
        data_int = binary_to_int(data_value)
        value_int = binary_to_int(value)
    except (ValueError, struct.error) as e:
        raise VerifyFailed from e

    if data_int <= value_int:
        raise VerifyFailed('op_data_greaterthan: {} x {}'.format(data_int, value_int))

    stack.append(data)
def op_data_match_interval(stack: Stack, log: list[str], extras: ScriptExtras) -> None:
    """Equivalent to an OP_GET_DATA_INT followed by an OP_MATCH_INTERVAL.

    Pops the number of interval points n, then n (pubkey, value) pairs, one
    extra pubkey, the data index k and the data. Pushes the pubkey of the first
    interval whose value is below the data's kth integer, or the extra pubkey.

    :raises MissingStackItems: if the stack has fewer elements than needed
    :raises VerifyFailed: verification failed
    """
    if len(stack) < 1:
        raise MissingStackItems('OP_DATA_MATCH_INTERVAL: stack is empty')

    data_n_items = stack.pop()
    assert isinstance(data_n_items, bytes)
    # TODO test this can be transformed to integer
    n_items = data_n_items[0]

    # stack items still required: n data points, n + 1 pubkeys, k and data
    will_use = 2 * n_items + 3
    if len(stack) < will_use:
        raise MissingStackItems('OP_DATA_MATCH_INTERVAL: need {} elements on stack, currently {}'.format(
            will_use, len(stack)))

    items = []
    try:
        for _ in range(n_items):
            pubkey = stack.pop()
            buf = stack.pop()
            assert isinstance(pubkey, (str, bytes))
            assert isinstance(buf, bytes)
            items.append((binary_to_int(buf), pubkey))
        # one pubkey is left on the stack
        last_pubkey = stack.pop()
        # next two items are the data index and the data itself
        data_k = stack.pop()
        data = stack.pop()
        assert isinstance(data_k, int)
        assert isinstance(data, bytes)
        data_int = binary_to_int(get_data_value(data_k, data))
    except (ValueError, struct.error) as e:
        raise VerifyFailed from e

    for value_int, pubkey in items:
        if data_int > value_int:
            stack.append(pubkey)
            return
    # no interval matched: the last pubkey on the stack wins
    stack.append(last_pubkey)
def op_data_match_value(stack: Stack, log: list[str], extras: ScriptExtras) -> None:
    """Equivalent to an OP_GET_DATA_STR followed by an OP_MATCH_VALUE.

    Pops the number of candidates n, then n (key, value) pairs, one extra key,
    the data index k and the data. Pushes the key whose value equals the data's
    kth integer, or the extra key when none matches.

    :raises MissingStackItems: if the stack has fewer elements than needed
    :raises VerifyFailed: verification failed
    """
    if len(stack) < 1:
        raise MissingStackItems('OP_DATA_MATCH_VALUE: empty stack')

    data_n_items = stack.pop()
    assert isinstance(data_n_items, bytes)
    # TODO test this can be transformed to integer
    n_items = data_n_items[0]

    # stack items still required: n data points, n + 1 keys, k and data
    will_use = 2 * n_items + 3
    if len(stack) < will_use:
        raise MissingStackItems('OP_DATA_MATCH_VALUE: need {} elements on stack, currently {}'.format(
            will_use, len(stack)))

    items = {}
    try:
        for _ in range(n_items):
            pubkey = stack.pop()
            buf = stack.pop()
            assert isinstance(pubkey, (str, bytes))
            assert isinstance(buf, bytes)
            items[binary_to_int(buf)] = pubkey
    except (ValueError, struct.error) as e:
        raise VerifyFailed from e

    # one key is left on the stack
    last_pubkey = stack.pop()
    # next two items are the data index and the data itself
    data_k = stack.pop()
    data = stack.pop()
    assert isinstance(data_k, int)
    assert isinstance(data, bytes)
    data_int = binary_to_int(get_data_value(data_k, data))
    winner_pubkey = items.get(data_int, last_pubkey)
    assert isinstance(winner_pubkey, (str, bytes))
    stack.append(winner_pubkey)
def op_find_p2pkh(stack: Stack, log: list[str], extras: ScriptExtras) -> None:
    """Check that the current transaction has a P2PKH output with the popped
    public key hash and the same amount as the spent output.

    Pushes 1 when a matching output is found; otherwise fails.

    :raises MissingStackItems: if the stack is empty
    :raises VerifyFailed: no matching output found
    """
    if not stack:
        raise MissingStackItems('OP_FIND_P2PKH: empty stack')

    spent_tx = extras.spent_tx
    txin = extras.txin
    tx = extras.tx
    # amount available on the nano contract (the original output)
    contract_value = spent_tx.outputs[txin.index].value

    address = stack.pop()
    address_b58 = get_address_b58_from_bytes(address)
    for output in tx.outputs:
        p2pkh_out = P2PKH.parse_script(output.script)
        if p2pkh_out and p2pkh_out.address == address_b58 and output.value == contract_value:
            stack.append(1)
            return
    # no output matched both the address and the value
    raise VerifyFailed
def op_checkmultisig(stack: Stack, log: list[str], extras: ScriptExtras) -> None:
    """Check that the minimum required number of signatures is present and valid.

    Stack layout (top first): pubkey count, pubkeys, signature count, signatures.
    Pushes 1 when all signatures verify, 0 otherwise.

    :raises MissingStackItems: if the stack is missing pubkeys or signatures
    :raises InvalidStackData: if counts are not integers or exceed the limits
    """
    settings = get_settings()

    if not stack:
        raise MissingStackItems('OP_CHECKMULTISIG: empty stack')

    # pop the quantity of pubkeys
    pubkey_count = stack.pop()
    if not isinstance(pubkey_count, int):
        raise InvalidStackData('OP_CHECKMULTISIG: pubkey count should be an integer')
    if pubkey_count > settings.MAX_MULTISIG_PUBKEYS:
        raise InvalidStackData('OP_CHECKMULTISIG: pubkey count ({}) exceeded the limit ({})'.format(
            pubkey_count,
            settings.MAX_MULTISIG_PUBKEYS,
            )
        )
    if len(stack) < pubkey_count:
        raise MissingStackItems('OP_CHECKMULTISIG: not enough public keys on the stack')

    pubkeys = [stack.pop() for _ in range(pubkey_count)]

    if not stack:
        raise MissingStackItems('OP_CHECKMULTISIG: less elements than should on the stack')

    # pop the quantity of signatures required
    signatures_count = stack.pop()
    if not isinstance(signatures_count, int):
        raise InvalidStackData('OP_CHECKMULTISIG: signatures count should be an integer')
    if signatures_count > settings.MAX_MULTISIG_SIGNATURES:
        raise InvalidStackData('OP_CHECKMULTISIG: signature count ({}) exceeded the limit ({})'.format(
            signatures_count,
            settings.MAX_MULTISIG_SIGNATURES,
            )
        )
    if len(stack) < signatures_count:
        raise MissingStackItems('OP_CHECKMULTISIG: not enough signatures on the stack')

    signatures = [stack.pop() for _ in range(signatures_count)]

    # Each signature is checked against the pubkeys in order (same order as in
    # the multisig wallet); a pubkey is consumed whether or not it matched.
    pubkey_index = 0
    for signature in signatures:
        while pubkey_index < len(pubkeys):
            candidate = pubkeys[pubkey_index]
            pair_stack = [signature, candidate]
            op_checksig(pair_stack, log, extras)
            matched = pair_stack.pop()
            pubkey_index += 1
            if matched == 1:
                break
        else:
            # ran out of pubkeys before verifying every signature
            stack.append(0)
            return

    # all signatures verified
    stack.append(1)


def op_integer(opcode: int, stack: Stack, log: list[str], extras: ScriptExtras) -> None:
    """Append the integer encoded by an OP_N opcode to the stack.

    Example: for OP_4 (0x54), append OP_4 - OP_0 = 4.

    :param opcode: the opcode to decode and append
    :param stack: the stack used when evaluating the script
    :raises ScriptError: when the opcode is not a valid OP_N
    """
    try:
        stack.append(decode_opn(opcode))
    except InvalidScriptError as e:
        raise ScriptError(e) from e
# Dispatch table from opcode to its stack-manipulating implementation.
# Pushdata opcodes and OP_N integers are handled directly by get_script_op
# and therefore do not appear here.
MAP_OPCODE_TO_FN: dict[int, Callable[[Stack, list[str], ScriptExtras], None]] = {
    # stack / comparison opcodes
    Opcode.OP_DUP: op_dup,
    Opcode.OP_EQUAL: op_equal,
    Opcode.OP_EQUALVERIFY: op_equalverify,
    Opcode.OP_HASH160: op_hash160,
    # signature opcodes
    Opcode.OP_CHECKSIG: op_checksig,
    Opcode.OP_CHECKMULTISIG: op_checkmultisig,
    Opcode.OP_CHECKDATASIG: op_checkdatasig,
    # timelock
    Opcode.OP_GREATERTHAN_TIMESTAMP: op_greaterthan_timestamp,
    # nano-contract / oracle data opcodes
    Opcode.OP_DATA_STREQUAL: op_data_strequal,
    Opcode.OP_DATA_GREATERTHAN: op_data_greaterthan,
    Opcode.OP_DATA_MATCH_VALUE: op_data_match_value,
    Opcode.OP_FIND_P2PKH: op_find_p2pkh,
}
class BaseScript(ABC):
    """Common interface implemented by the concrete script types.

    Holds the methods shared by different script types to help abstracting
    the script type away from callers.
    """

    @abstractmethod
    def to_human_readable(self) -> dict[str, Any]:
        """Return a nice dict for using on informational json APIs."""
        raise NotImplementedError

    @abstractmethod
    def get_type(self) -> str:
        """Get script type name"""
        raise NotImplementedError

    @abstractmethod
    def get_script(self) -> bytes:
        """Get or build script"""
        raise NotImplementedError

    @abstractmethod
    def get_address(self) -> Optional[str]:
        """Get address for this script, not all valid recognizable scripts have addresses."""
        raise NotImplementedError

    @abstractmethod
    def get_timelock(self) -> Optional[int]:
        """Get timelock for this script, completely optional."""
        raise NotImplementedError
+ +import re +from typing import TYPE_CHECKING, Any, Generator, NamedTuple, Optional, Pattern, Union + +from hathor.conf.get_settings import get_settings +from hathor.crypto.util import decode_address +from hathor.transaction.exceptions import ScriptError +from hathor.transaction.scripts.base_script import BaseScript + +if TYPE_CHECKING: + from hathor.transaction.scripts import P2PKH, MultiSig, Opcode + + +def re_compile(pattern: str) -> Pattern[bytes]: + """ Transform a given script pattern into a regular expression. + + The script pattern is like a regular expression, but you may include five + special symbols: + (i) OP_DUP, OP_HASH160, and all other opcodes; + (ii) DATA_: data with the specified length; + (iii) NUMBER: a 4-byte integer; + (iv) BLOCK: a variable length block, to be parsed later + + Example: + >>> r = re_compile( + ... '^(?:DATA_4 OP_GREATERTHAN_TIMESTAMP)? ' + ... 'OP_DUP OP_HASH160 (DATA_20) OP_EQUALVERIFY OP_CHECKSIG$' + ... ) + + :return: A compiled regular expression matcher + :rtype: :py:class:`re.Pattern` + """ + + def _to_byte_pattern(m): + x = m.group().decode('ascii').strip() + if x.startswith('OP_'): + from hathor.transaction.scripts.opcode import Opcode + return bytes([Opcode[x]]) + elif x.startswith('DATA_'): + length = int(m.group()[5:]) + return _re_pushdata(length) + elif x.startswith('NUMBER'): + return b'.{5}' + elif x.startswith('BLOCK'): + return b'.*' + else: + raise ValueError('Invalid opcode: {}'.format(x)) + + p = pattern.encode('ascii') + p = re.sub(rb'\s*([A-Z0-9_]+)\s*', _to_byte_pattern, p) + return re.compile(p, re.DOTALL) + + +def _re_pushdata(length: int) -> bytes: + """ Create a regular expression that matches a data block with a given length. 
+ + :return: A non-compiled regular expression + :rtype: bytes + """ + from hathor.transaction.scripts.opcode import Opcode + ret = [bytes([Opcode.OP_PUSHDATA1]), bytes([length]), b'.{', str(length).encode('ascii'), b'}'] + + if length <= 75: + # for now, we accept <= 75 bytes with OP_PUSHDATA1. It's optional + ret.insert(1, b'?') + + return b''.join(ret) + + +def create_base_script(address: str, timelock: Optional[Any] = None) -> BaseScript: + """ Verifies if address is P2PKH or Multisig and return the corresponding BaseScript implementation. + """ + from hathor.transaction.scripts.execute import binary_to_int + settings = get_settings() + baddress = decode_address(address) + if baddress[0] == binary_to_int(settings.P2PKH_VERSION_BYTE): + from hathor.transaction.scripts import P2PKH + return P2PKH(address, timelock) + elif baddress[0] == binary_to_int(settings.MULTISIG_VERSION_BYTE): + from hathor.transaction.scripts import MultiSig + return MultiSig(address, timelock) + else: + raise ScriptError('The address is not valid') + + +def create_output_script(address: bytes, timelock: Optional[Any] = None) -> bytes: + """ Verifies if address is P2PKH or Multisig and create correct output script + + :param address: address to send tokens + :type address: bytes + + :param timelock: timestamp until when the output is locked + :type timelock: bytes + + :raises ScriptError: if address is not from one of the possible options + + :rtype: bytes + """ + from hathor.transaction.scripts.execute import binary_to_int + settings = get_settings() + # XXX: if the address class can somehow be simplified create_base_script could be used here + if address[0] == binary_to_int(settings.P2PKH_VERSION_BYTE): + from hathor.transaction.scripts import P2PKH + return P2PKH.create_output_script(address, timelock) + elif address[0] == binary_to_int(settings.MULTISIG_VERSION_BYTE): + from hathor.transaction.scripts import MultiSig + return MultiSig.create_output_script(address, timelock) + else: + 
raise ScriptError('The address is not valid') + + +def parse_address_script(script: bytes) -> Optional[Union['P2PKH', 'MultiSig']]: + """ Verifies if address is P2PKH or Multisig and calls correct parse_script method + + :param script: script to decode + :type script: bytes + + :return: P2PKH or MultiSig class or None + :rtype: class or None + """ + from hathor.transaction.scripts import P2PKH, MultiSig + script_classes: list[type[Union[P2PKH, MultiSig]]] = [P2PKH, MultiSig] + # Each class verifies its script + for script_class in script_classes: + if script_class.re_match.search(script): + return script_class.parse_script(script) + return None + + +class _ScriptOperation(NamedTuple): + opcode: Union['Opcode', int] + position: int + data: Union[None, bytes, int, str] + + +def parse_script_ops(data: bytes) -> Generator[_ScriptOperation, None, None]: + """ Parse script yielding each operation on the script + this is an utility function to make scripts human readable for debugging and dev + + :param data: script to parse that contains data and opcodes + :type data: bytes + + :return: generator for operations on script + :rtype: Generator[_ScriptOperation, None, None] + """ + from hathor.transaction.scripts import Opcode + from hathor.transaction.scripts.execute import Stack, get_script_op + op: Union[Opcode, int] + + pos = 0 + last_pos = 0 + data_len = len(data) + stack: Stack = [] + while pos < data_len: + last_pos = pos + opcode, pos = get_script_op(pos, data, stack) + try: + op = Opcode(opcode) + except ValueError: + op = opcode + if len(stack) != 0: + yield _ScriptOperation(opcode=op, position=last_pos, data=stack.pop()) + else: + yield _ScriptOperation(opcode=op, position=last_pos, data=None) + + +def count_sigops(data: bytes) -> int: + """ Count number of signature operations on the script + + :param data: script to parse that contains data and opcodes + :type data: bytes + + :raises OutOfData: when trying to read out of script + :raises InvalidScriptError: when 
an invalid opcode is found + :raises InvalidScriptError: when the previous opcode to an + OP_CHECKMULTISIG is not an integer (number of operations to execute) + + :return: number of signature operations the script would do if it was executed + :rtype: int + """ + from hathor.transaction.scripts import Opcode + from hathor.transaction.scripts.execute import decode_opn, get_script_op + settings = get_settings() + n_ops: int = 0 + data_len: int = len(data) + pos: int = 0 + last_opcode: Union[int, None] = None + + while pos < data_len: + opcode, pos = get_script_op(pos, data) + + if opcode == Opcode.OP_CHECKSIG: + n_ops += 1 + elif opcode == Opcode.OP_CHECKMULTISIG: + assert isinstance(last_opcode, int) + if Opcode.OP_0 <= last_opcode <= Opcode.OP_16: + # Conventional OP_CHECKMULTISIG: ... ... + # this function will run op_checksig with each pair (sign_x, pubkey_y) until all signatures + # are verified so the worst case scenario is n op_checksig and the best m op_checksig + # we know m <= n, so for now we are counting n operations (the upper limit) + n_ops += decode_opn(last_opcode) + else: + # Unconventional OP_CHECKMULTISIG: + # We count the limit for PUBKEYS, since this is also the upper limit on signature operations + # that any op_checkmultisig would run + n_ops += settings.MAX_MULTISIG_PUBKEYS + last_opcode = opcode + return n_ops + + +def get_sigops_count(data: bytes, output_script: Optional[bytes] = None) -> int: + """ Count number of signature operations on the script, if it's an input script and the spent output is passed + check the spent output for MultiSig and count operations on redeem_script too + + :param data: script to parse with opcodes + :type data: bytes + + :param output_script: spent output script if data was from an TxIn + :type output_script: Union[None, bytes] + + :raises OutOfData: when trying to read out of script + :raises InvalidScriptError: when an invalid opcode is found + + :return: number of signature operations the script would do if 
it was executed + :rtype: int + """ + # If validating an input, should check the spent_tx for MultiSig + if output_script is not None: + # If it's multisig we have to validate the redeem_script sigop count + from hathor.transaction.scripts import MultiSig + if MultiSig.re_match.search(output_script): + multisig_data = MultiSig.get_multisig_data(data) + # input_script + redeem_script + return count_sigops(multisig_data) + + return count_sigops(data) + + +def get_pushdata(data: bytes) -> bytes: + if data[0] > 75: + length = data[1] + start = 2 + else: + length = data[0] + start = 1 + return data[start:(start + length)] diff --git a/hathor/transaction/scripts/execute.py b/hathor/transaction/scripts/execute.py new file mode 100644 index 000000000..23109afbc --- /dev/null +++ b/hathor/transaction/scripts/execute.py @@ -0,0 +1,306 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import struct +from typing import NamedTuple, Optional, Union + +from hathor.transaction import BaseTransaction, Transaction, TxInput +from hathor.transaction.exceptions import DataIndexError, FinalStackInvalid, InvalidScriptError, OutOfData + + +class ScriptExtras(NamedTuple): + tx: Transaction + txin: TxInput + spent_tx: BaseTransaction + + +# XXX: Because the Stack is a heterogeneous list of bytes and int, and some OPs only work for when the stack has some +# or the other type, there are many places that require an assert to prevent the wrong type from being used, +# alternatives include: 1. only using `list[bytes]` and operations that work on `int` to build them from `bytes`, +# 2. using `bytearray` instead of `list[...]` and using type codes on the stack or at least value sizes on the +# stack and OPs should use the extra info accordingly 3. using some "in stack error" at least custom exceptions +# for signaling that an OP was applied on a wrongly typed stack. +Stack = list[Union[bytes, int, str]] + + +class OpcodePosition(NamedTuple): + opcode: int + position: int + + +def execute_eval(data: bytes, log: list[str], extras: ScriptExtras) -> None: + """ Execute eval from data executing opcode methods + + :param data: data to be evaluated that contains data and opcodes + :type data: bytes + + :param log: list of log messages + :type log: list[str] + + :param extras: namedtuple with extra fields + :type extras: :py:class:`hathor.transaction.scripts.ScriptExtras` + + :raises ScriptError: case opcode is not found + :raises FinalStackInvalid: case the evaluation fails + """ + from hathor.transaction.scripts.opcode import Opcode, execute_op_code + from hathor.transaction.scripts.script_context import ScriptContext + stack: Stack = [] + context = ScriptContext(stack=stack, logs=log, extras=extras) + data_len = len(data) + pos = 0 + while pos < data_len: + opcode, pos = get_script_op(pos, data, stack) + if Opcode.is_pushdata(opcode): + continue + + # this is an 
opcode manipulating the stack + execute_op_code(Opcode(opcode), context) + + evaluate_final_stack(stack, log) + + +def evaluate_final_stack(stack: Stack, log: list[str]) -> None: + """ Checks the final state of the stack. + It's valid if only has 1 value on stack and that value is 1 (true) + """ + if len(stack) == 0: + log.append('Empty Stack left') + raise FinalStackInvalid('\n'.join(log)) + if len(stack) > 1: + log.append('Stack left with more than one value') + raise FinalStackInvalid('\n'.join(log)) + # check if value left on stack is 1 (true) + if stack.pop() != 1: + # stack left with non-True value + log.append('Stack left with False value') + raise FinalStackInvalid('\n'.join(log)) + + +def script_eval(tx: Transaction, txin: TxInput, spent_tx: BaseTransaction) -> None: + """Evaluates the output script and input data according to + a very limited subset of Bitcoin's scripting language. + + :param tx: the transaction being validated, the 'owner' of the input data + :type tx: :py:class:`hathor.transaction.Transaction` + + :param txin: transaction input being evaluated + :type txin: :py:class:`hathor.transaction.TxInput` + + :param spent_tx: the transaction referenced by the input + :type spent_tx: :py:class:`hathor.transaction.BaseTransaction` + + :raises ScriptError: if script verification fails + """ + input_data = txin.data + output_script = spent_tx.outputs[txin.index].script + log: list[str] = [] + extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) + + from hathor.transaction.scripts import MultiSig + if MultiSig.re_match.search(output_script): + # For MultiSig there are 2 executions: + # First we need to evaluate that redeem_script matches redeem_script_hash + # we can't use input_data + output_script because it will end with an invalid stack + # i.e. 
the signatures will still be on the stack after ouput_script is executed + redeem_script_pos = MultiSig.get_multisig_redeem_script_pos(input_data) + full_data = txin.data[redeem_script_pos:] + output_script + execute_eval(full_data, log, extras) + + # Second, we need to validate that the signatures on the input_data solves the redeem_script + # we pop and append the redeem_script to the input_data and execute it + multisig_data = MultiSig.get_multisig_data(extras.txin.data) + execute_eval(multisig_data, log, extras) + else: + # merge input_data and output_script + full_data = input_data + output_script + execute_eval(full_data, log, extras) + + +def decode_opn(opcode: int) -> int: + """ Decode integer opcode (OP_N) to its integer value + + :param opcode: the opcode to convert + :type opcode: bytes + + :raises InvalidScriptError: case opcode is not a valid OP_N + + :return: int value for opcode param + :rtype: int + """ + from hathor.transaction.scripts import Opcode + int_val = opcode - Opcode.OP_0 + if not (0 <= int_val <= 16): + raise InvalidScriptError('unknown opcode {}'.format(opcode)) + return int_val + + +def get_script_op(pos: int, data: bytes, stack: Optional[Stack] = None) -> OpcodePosition: + """ Interpret opcode at `pos` and return the opcode and the position of the next opcode + if opcode is a pushdata, push extracted data to stack if there is a stack + + :param pos: position of opcode to read + :type pos: int + + :param data: script to be evaluated that contains data and opcodes + :type data: bytes + + :param stack: stack to put any extracted data or None if not interested on the extracted data + :type stack: Union[Stack, None] + + :raises OutOfData: when trying to read out of script + :raises InvalidScriptError: when opcode in `pos` is invalid + + :return: extracted opcode at `pos` and position of next opcode on `data` + :rtype: OpcodePosition + """ + opcode = get_data_single_byte(pos, data) + + # validate opcode + from hathor.transaction.scripts 
import Opcode + if not Opcode.is_valid_opcode(opcode): + raise InvalidScriptError('Invalid Opcode ({}) at position {} in {!r}'.format(opcode, pos, data)) + + to_append: Union[bytes, int, str] + if 1 <= opcode <= 75: + # pushdata: push up to 75 bytes on stack + pos += 1 + to_append = get_data_bytes(pos, opcode, data) + pos += opcode + if stack is not None: + stack.append(to_append) + elif opcode == Opcode.OP_PUSHDATA1: + # pushdata1: push up to 255 bytes on stack + pos += 1 + length = get_data_single_byte(pos, data) + pos += 1 + to_append = get_data_bytes(pos, length, data) + pos += length + if stack is not None: + stack.append(to_append) + elif Opcode.OP_0 <= opcode <= Opcode.OP_16: + # OP_N: push and integer (0 to 16) to stack + # OP_N in [OP_0, OP_16] + to_append = decode_opn(opcode) + pos += 1 + if stack is not None: + stack.append(to_append) + else: + # if opcode is a function and not a pushdata, move pos to next byte (next opcode) + pos += 1 + + return OpcodePosition(opcode=opcode, position=pos) + + +def get_data_value(k: int, data: bytes) -> bytes: + """Extracts the kth value from data. + + data should be in the format value0:value1:value2:...:valueN. This last representation + is merely for understanding the logic. In practice, data will be a sequence of bytes, + with each value preceded by the length of such value. + + # TODO allow values larger than 255 bytes (some logic similar to OP_PUSHDATA1?) 
+ + :param k: index of item to retrieve + :type k: int + + :param data: data to get value from + :type data: bytes + + :raises OutOfData: if data length to read is larger than what's available + :raises DataIndexError: index requested from data is not available + """ + data_len = len(data) + position = 0 + iteration = 0 + while position < data_len: + length = data[position] + if length == 0: + # TODO throw error + pass + position += 1 + if (position + length) > len(data): + raise OutOfData('trying to read {} bytes starting at {}, available {}'.format(length, position, len(data))) + value = data[position:position + length] + if iteration == k: + return value + iteration += 1 + position += length + raise DataIndexError + + +def binary_to_int(binary: bytes) -> int: + """Receives a binary and transforms it to an integer + + :param binary: value to convert + :type binary: bytes + """ + if len(binary) == 1: + _format = '!B' + elif len(binary) == 2: + _format = '!H' + elif len(binary) == 4: + _format = '!I' + elif len(binary) == 8: + _format = '!Q' + else: + raise struct.error + + (value,) = struct.unpack(_format, binary) + return value + + +def get_data_bytes(position: int, length: int, data: bytes) -> bytes: + """ Extract `length` bytes from `data` starting at `position` + + :param position: start position of bytes string to extract + :type position: int + + :param length: len of bytes str to extract + :type length: int + + :param data: script containing data to extract + :type data: bytes + + :raises OutOfData: when trying to read out of script + + :return: bytes string of extracted data + :rtype: bytes + """ + if not (0 < length <= len(data)): + raise OutOfData("length ({}) should be from 0 up to data length".format(length)) + if not (0 < position < len(data)): + raise OutOfData("position should be inside data") + if (position+length) > len(data): + raise OutOfData('trying to read {} bytes starting at {}, available {}'.format(length, position, len(data))) + return 
data[position:position+length] + + +def get_data_single_byte(position: int, data: bytes) -> int: + """ Extract 1 byte from `data` at `position` + + :param position: position of byte to extract + :type position: int + + :param data: script containing data to extract + :type data: bytes + + :raises OutOfData: when trying to read out of script + + :return: extracted byte + :rtype: int + """ + if not (0 <= position < len(data)): + raise OutOfData("trying to read a byte at {} outside of data, available {}".format(position, len(data))) + return data[position] diff --git a/hathor/transaction/scripts/hathor_script.py b/hathor/transaction/scripts/hathor_script.py new file mode 100644 index 000000000..0a1214c1b --- /dev/null +++ b/hathor/transaction/scripts/hathor_script.py @@ -0,0 +1,51 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import struct +from typing import Union + +from hathor.transaction.scripts.opcode import Opcode + + +class HathorScript: + """This class is supposed to be help build scripts abstracting some corner cases. + + For example, when pushing data to the stack, we may or may not have to use OP_PUSHDATA. + This is the sequence we have to add to the script: + - len(data) <= 75: [len(data) data] + - len(data) > 75: [OP_PUSHDATA1 len(data) data] + + pushData abstracts this differences and presents an unique interface. 
+ """ + def __init__(self) -> None: + self.data = b'' + + def addOpcode(self, opcode: Opcode) -> None: + self.data += bytes([opcode]) + + def pushData(self, data: Union[int, bytes]) -> None: + if isinstance(data, int): + if data > 4294967295: + n = struct.pack('!Q', data) + elif data > 65535: + n = struct.pack('!I', data) + elif data > 255: + n = struct.pack('!H', data) + else: + n = struct.pack('!B', data) + data = n + if len(data) <= 75: + self.data += (bytes([len(data)]) + data) + else: + self.data += (bytes([Opcode.OP_PUSHDATA1]) + bytes([len(data)]) + data) diff --git a/hathor/transaction/scripts/multi_sig.py b/hathor/transaction/scripts/multi_sig.py new file mode 100644 index 000000000..7fe4f10ed --- /dev/null +++ b/hathor/transaction/scripts/multi_sig.py @@ -0,0 +1,184 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import struct +from typing import Any, Optional + +from hathor.crypto.util import decode_address, get_address_b58_from_redeem_script_hash +from hathor.transaction.scripts.base_script import BaseScript +from hathor.transaction.scripts.construct import get_pushdata, re_compile +from hathor.transaction.scripts.execute import Stack, get_script_op +from hathor.transaction.scripts.hathor_script import HathorScript +from hathor.transaction.scripts.opcode import Opcode, op_pushdata, op_pushdata1 + + +class MultiSig(BaseScript): + re_match = re_compile('^(?:(DATA_4) OP_GREATERTHAN_TIMESTAMP)? 
' 'OP_HASH160 (DATA_20) OP_EQUAL$') + + def __init__(self, address: str, timelock: Optional[Any] = None) -> None: + """This class represents the multi signature script (MultiSig). It enables the group of persons + who has the corresponding private keys of the address to spend the tokens. + + This script validates the signatures and public keys on the corresponding input + data. + + Output script and the corresponding input data are usually represented like: + output script: OP_HASH160 OP_EQUAL + input data: ... + + :param address: address to send tokens + :type address: string(base58) + + :param timelock: timestamp until when it's locked + :type timelock: int + """ + self.address = address + self.timelock = timelock + + def to_human_readable(self) -> dict[str, Any]: + """ Decode MultiSig class to dict with its type and data + + :return: dict with MultiSig info + :rtype: dict[str:] + """ + ret: dict[str, Any] = {} + ret['type'] = self.get_type() + ret['address'] = self.address + ret['timelock'] = self.timelock + return ret + + def get_type(self) -> str: + return 'MultiSig' + + def get_script(self) -> bytes: + return MultiSig.create_output_script(decode_address(self.address), self.timelock) + + def get_address(self) -> Optional[str]: + return self.address + + def get_timelock(self) -> Optional[int]: + return self.timelock + + @classmethod + def get_multisig_redeem_script_pos(cls, input_data: bytes) -> int: + """ Get the position of the opcode that pushed the redeem_script on the stack + + :param input_data: data from the input being evaluated + :type input_data: bytes + + :return: position of pushdata for redeem_script + :rtype: int + """ + pos = 0 + last_pos = 0 + data_len = len(input_data) + while pos < data_len: + last_pos = pos + _, pos = get_script_op(pos, input_data) + return last_pos + + @classmethod + def create_output_script(cls, address: bytes, timelock: Optional[Any] = None) -> bytes: + """ + :param address: address to send tokens + :type address: bytes + 
+ :param timelock: timestamp until when the output is locked + :type timelock: bytes + + :rtype: bytes + """ + assert len(address) == 25 + redeem_script_hash = address[1:-4] + s = HathorScript() + if timelock: + s.pushData(timelock) + s.addOpcode(Opcode.OP_GREATERTHAN_TIMESTAMP) + s.addOpcode(Opcode.OP_HASH160) + s.pushData(redeem_script_hash) + s.addOpcode(Opcode.OP_EQUAL) + return s.data + + @classmethod + def create_input_data(cls, redeem_script: bytes, signatures: list[bytes]) -> bytes: + """ + :param redeem_script: script to redeem the tokens: ... + :type redeem_script: bytes + + :param signatures: array of signatures to validate the input and redeem the tokens + :type signagures: list[bytes] + + :rtype: bytes + """ + s = HathorScript() + for signature in signatures: + s.pushData(signature) + s.pushData(redeem_script) + return s.data + + @classmethod + def parse_script(cls, script: bytes) -> Optional['MultiSig']: + """Checks if the given script is of type multisig. If it is, returns the MultiSig object. + Otherwise, returns None. 
+ + :param script: script to check + :type script: bytes + + :rtype: :py:class:`hathor.transaction.scripts.MultiSig` or None + """ + match = cls.re_match.search(script) + if match: + groups = match.groups() + timelock = None + pushdata_timelock = groups[0] + if pushdata_timelock: + timelock_bytes = pushdata_timelock[1:] + timelock = struct.unpack('!I', timelock_bytes)[0] + redeem_script_hash = get_pushdata(groups[1]) + address_b58 = get_address_b58_from_redeem_script_hash(redeem_script_hash) + return cls(address_b58, timelock) + return None + + @classmethod + def get_multisig_data(cls, input_data: bytes) -> bytes: + """ Input data has many signatures and a block with the redeem script + In the second part of the script eval we need to evaluate the redeem script + so we need to get the redeem script without the block, to evaluate the elements on it + + This method removes the (possible) OP_PUSHDATA1 byte and the redeem script length, + so it can be evaluated as any normal script + + :param input_data: data from the input being evaluated + :type input_data: bytes + + :return: data ready to be evaluated. 
The signatures and the redeem script + :rtype: bytes + """ + pos = 0 + last_pos = 0 + stack: Stack = [] + data_len = len(input_data) + while pos < data_len: + last_pos = pos + opcode = input_data[pos] + if (opcode >= 1 and opcode <= 75): + pos = op_pushdata(pos, input_data, stack) + elif opcode == Opcode.OP_PUSHDATA1: + pos = op_pushdata1(pos, input_data, stack) + else: + pos += 1 + + redeem_script = stack[-1] + assert isinstance(redeem_script, bytes) + return input_data[:last_pos] + redeem_script diff --git a/hathor/transaction/scripts/nano_contract_match_values.py b/hathor/transaction/scripts/nano_contract_match_values.py new file mode 100644 index 000000000..cc4dfa8ff --- /dev/null +++ b/hathor/transaction/scripts/nano_contract_match_values.py @@ -0,0 +1,191 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 +import struct +from typing import Any, Optional + +from hathor.crypto.util import get_address_b58_from_bytes +from hathor.transaction.scripts.construct import get_pushdata, re_compile +from hathor.transaction.scripts.execute import binary_to_int +from hathor.transaction.scripts.hathor_script import HathorScript +from hathor.transaction.scripts.opcode import Opcode + + +# XXX: does it make sense to make this BaseScript too? 
+class NanoContractMatchValues: + re_match = re_compile('^OP_DUP OP_HASH160 (DATA_20) OP_EQUALVERIFY OP_CHECKDATASIG OP_0 (BLOCK) OP_DATA_STREQUAL ' + 'OP_1 (NUMBER) OP_DATA_GREATERTHAN OP_2 (BLOCK) OP_DATA_MATCH_VALUE OP_FIND_P2PKH$') + + def __init__(self, oracle_pubkey_hash, min_timestamp, oracle_data_id, value_dict, fallback_pubkey_hash=b'\x00'): + """This class represents a nano contract that tries to match on a single value. The pubKeyHash + associated with the data given by the oracle will be able to spend the contract tokens. + + :param oracle_pubkey_hash: oracle's public key after being hashed by SHA256 and RIPMD160 + :type oracle_pubkey_hash: bytes + + :param min_timestamp: contract can only be spent after this timestamp. If we don't need it, simply + pass same timestamp as transaction + :type min_timestamp: int + + :param oracle_data_id: unique id for the data reported by the oracle. For eg, a oracle that reports + stock prices can use stock ticker symbols as this id + :type oracle_data_id: bytes + + :param value_dict: a dictionary with the pubKeyHash and corresponding value ({pubKeyHash, value}). 
+ The pubkeyHash with value matching the data sent by oracle will be able to spend the contract funds + :type value_dict: dict[bytes, int] + + :param fallback_pubkey_hash: if none of the values match, this pubkey hash identifies the winner address + :type fallback_pubkey_hash: bytes + """ + self.oracle_pubkey_hash = oracle_pubkey_hash + self.min_timestamp = min_timestamp + self.oracle_data_id = oracle_data_id + self.value_dict = value_dict # dict[bytes, int] + self.fallback_pubkey_hash = fallback_pubkey_hash + + def to_human_readable(self) -> dict[str, Any]: + ret: dict[str, Any] = {} + ret['type'] = 'NanoContractMatchValues' + ret['oracle_pubkey_hash'] = base64.b64encode(self.oracle_pubkey_hash).decode('utf-8') + ret['min_timestamp'] = self.min_timestamp + ret['oracle_data_id'] = self.oracle_data_id.decode('utf-8') + ret['value_dict'] = {get_address_b58_from_bytes(k): v for k, v in self.value_dict.items()} + try: + if len(self.fallback_pubkey_hash) == 1: + ret['fallback_pubkey_hash'] = None + else: + ret['fallback_pubkey_hash'] = get_address_b58_from_bytes(self.fallback_pubkey_hash) + except TypeError: + ret['fallback_pubkey_hash'] = None + return ret + + def create_output_script(self) -> bytes: + """ + :return: the output script in binary + :rtype: bytes + """ + s = HathorScript() + s.addOpcode(Opcode.OP_DUP) + s.addOpcode(Opcode.OP_HASH160) + s.pushData(self.oracle_pubkey_hash) + s.addOpcode(Opcode.OP_EQUALVERIFY) + s.addOpcode(Opcode.OP_CHECKDATASIG) + # compare first value from data with oracle_data_id + s.addOpcode(Opcode.OP_0) + s.pushData(self.oracle_data_id) + s.addOpcode(Opcode.OP_DATA_STREQUAL) + # compare second value from data with min_timestamp + s.addOpcode(Opcode.OP_1) + s.pushData(struct.pack('!I', self.min_timestamp)) + s.addOpcode(Opcode.OP_DATA_GREATERTHAN) + # finally, compare third value with values on dict + s.addOpcode(Opcode.OP_2) + s.pushData(self.fallback_pubkey_hash) + for pubkey_hash, value in self.value_dict.items(): + 
s.pushData(value) + s.pushData(pubkey_hash) + # we use int as bytes because it may be greater than 16 + # TODO should we limit it to 16? + s.pushData(len(self.value_dict)) + s.addOpcode(Opcode.OP_DATA_MATCH_VALUE) + # pubkey left on stack should be on outputs + s.addOpcode(Opcode.OP_FIND_P2PKH) + return s.data + + @classmethod + def create_input_data(cls, data: bytes, oracle_sig: bytes, oracle_pubkey: bytes) -> bytes: + """ + :param data: data from the oracle + :type data: bytes + + :param oracle_sig: the data signed by the oracle, with its private key + :type oracle_sig: bytes + + :param oracle_pubkey: the oracle's public key + :type oracle_pubkey: bytes + + :rtype: bytes + """ + s = HathorScript() + s.pushData(data) + s.pushData(oracle_sig) + s.pushData(oracle_pubkey) + return s.data + + @classmethod + def parse_script(cls, script: bytes) -> Optional['NanoContractMatchValues']: + """Checks if the given script is of type NanoContractMatchValues. If it is, returns the corresponding object. + Otherwise, returns None. + + :param script: script to check + :type script: bytes + + :rtype: :py:class:`hathor.transaction.scripts.NanoContractMatchValues` or None + """ + # regex for this is a bit tricky, as some data has variable length. We first match the base regex for this + # script and later manually parse variable length fields + match = cls.re_match.search(script) + if match: + groups = match.groups() + # oracle pubkey hash + oracle_pubkey_hash = get_pushdata(groups[0]) + # oracle data id + oracle_data_id = get_pushdata(groups[1]) + # timestamp + timestamp = groups[2] + min_timestamp = binary_to_int(timestamp[1:]) + + # variable length data. We'll parse it manually. 
It should have the following format: + # fallback_pubkey_hash, [valueN, pubkey_hash_N], N + extra_data = groups[3] + + fallback_pubkey_len = extra_data[0] + if len(extra_data) < fallback_pubkey_len + 2: + # extra data has at least the fallback_pubkey length (1 byte) and number of + # values (N, after values and pubkeys). That's why we use fallback_pubkey_len + 2 + return None + fallback_pubkey = extra_data[1] if fallback_pubkey_len == 1 else extra_data[1:fallback_pubkey_len] + n_values = extra_data[-1] + + values_pubkeys = extra_data[(fallback_pubkey_len + 1):-2] + value_dict = {} + pos = 0 + for i in range(n_values): + if len(values_pubkeys[pos:]) < 1: + return None + value_len = values_pubkeys[pos] + pos += 1 + if len(values_pubkeys[pos:]) < value_len: + return None + value = values_pubkeys[pos] if value_len == 1 else binary_to_int(values_pubkeys[pos:(pos + value_len)]) + pos += value_len + if len(values_pubkeys[pos:]) < 1: + return None + pubkey_len = values_pubkeys[pos] + pos += 1 + if len(values_pubkeys[pos:]) < pubkey_len: + return None + pubkey = values_pubkeys[pos:(pos + pubkey_len)] + pos += pubkey_len + value_dict[pubkey] = value + + if len(values_pubkeys[pos:]) > 0: + # shouldn't have data left + return None + + return NanoContractMatchValues(oracle_pubkey_hash, min_timestamp, oracle_data_id, value_dict, + fallback_pubkey) + return None diff --git a/hathor/transaction/scripts/opcode.py b/hathor/transaction/scripts/opcode.py new file mode 100644 index 000000000..3c185f5a5 --- /dev/null +++ b/hathor/transaction/scripts/opcode.py @@ -0,0 +1,642 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import struct +from enum import IntEnum + +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ec + +from hathor.conf.get_settings import get_settings +from hathor.crypto.util import ( + get_address_b58_from_bytes, + get_hash160, + get_public_key_from_bytes_compressed, + is_pubkey_compressed, +) +from hathor.transaction.exceptions import ( + EqualVerifyFailed, + InvalidScriptError, + InvalidStackData, + MissingStackItems, + OracleChecksigFailed, + ScriptError, + TimeLocked, + VerifyFailed, +) +from hathor.transaction.scripts.execute import Stack, binary_to_int, decode_opn, get_data_value, get_script_op +from hathor.transaction.scripts.script_context import ScriptContext + + +class Opcode(IntEnum): + OP_0 = 0x50 + OP_1 = 0x51 + OP_2 = 0x52 + OP_3 = 0x53 + OP_4 = 0x54 + OP_5 = 0x55 + OP_6 = 0x56 + OP_7 = 0x57 + OP_8 = 0x58 + OP_9 = 0x59 + OP_10 = 0x5a + OP_11 = 0x5b + OP_12 = 0x5c + OP_13 = 0x5d + OP_14 = 0x5e + OP_15 = 0x5f + OP_16 = 0x60 + OP_DUP = 0x76 + OP_EQUAL = 0x87 + OP_EQUALVERIFY = 0x88 + OP_CHECKSIG = 0xAC + OP_HASH160 = 0xA9 + OP_PUSHDATA1 = 0x4C + OP_GREATERTHAN_TIMESTAMP = 0x6F + OP_CHECKMULTISIG = 0xAE + OP_CHECKDATASIG = 0xBA + OP_DATA_STREQUAL = 0xC0 + OP_DATA_GREATERTHAN = 0xC1 + OP_FIND_P2PKH = 0xD0 + OP_DATA_MATCH_VALUE = 0xD1 + + @classmethod + def is_pushdata(cls, opcode: int) -> bool: + """ Check if `opcode` represents an operation of pushing data on stack + """ + if 1 <= opcode <= 75: + # case: push [1,75] 
bytes on stack (op_pushdata) + return True + elif cls.OP_0 <= opcode <= cls.OP_16: + # case: push integer on stack (op_integer) + return True + elif opcode == cls.OP_PUSHDATA1: + # case: op_pushdata1 + return True + # ...Any other case + return False + + @classmethod + def is_valid_opcode(cls, opcode: int) -> bool: + """ Check if `opcode` is valid + - check for pushdata first to validate unconventional opcodes for data + - check for conventional opcode + """ + if cls.is_pushdata(opcode): + return True + try: + cls(opcode) + except ValueError: + return False + else: + return True + + +def op_pushdata(position: int, full_data: bytes, stack: Stack) -> int: + """Pushes to stack when data is up to 75 bytes + + :param position: current position we're reading from full_data + :type input_data: int + + :param full_data: input data + output script combined + :type full_data: bytes + + :param stack: the stack used when evaluating the script + :type stack: list[] + + :raises OutOfData: if data length to read is larger than what's available + + :return: new position to be read from full_data + :rtype: int + """ + + length, new_pos = get_script_op(position, full_data, stack) + assert length <= 75 + return new_pos + + +def op_pushdata1(position: int, full_data: bytes, stack: Stack) -> int: + """Pushes data to stack; next byte contains number of bytes to be pushed + + :param position: current position we're reading from full_data + :type input_data: int + + :param full_data: input data + output script combined + :type full_data: bytes + + :param stack: the stack used when evaluating the script + :type stack: list[] + + :raises OutOfData: if data length to read is larger than what's available + + :return: new position to be read from full_data + :rtype: int + """ + opcode, new_pos = get_script_op(position, full_data, stack) + assert opcode == Opcode.OP_PUSHDATA1 + return new_pos + + +def op_dup(context: ScriptContext) -> None: + """Duplicates item on top of stack + + :param stack: 
the stack used when evaluating the script + :type stack: list[] + + :raises MissingStackItems: if there's no element on stack + """ + if not len(context.stack): + raise MissingStackItems('OP_DUP: empty stack') + context.stack.append(context.stack[-1]) + + +def op_greaterthan_timestamp(context: ScriptContext) -> None: + """Check whether transaction's timestamp is greater than the top of stack + + The top of stack must be a big-endian u32int. + + :param stack: the stack used when evaluating the script + :type stack: list[] + + :raises MissingStackItems: if there's no element on stack + """ + if not len(context.stack): + raise MissingStackItems('OP_GREATERTHAN_TIMESTAMP: empty stack') + buf = context.stack.pop() + assert isinstance(buf, bytes) + (timelock,) = struct.unpack('!I', buf) + if context.extras.tx.timestamp <= timelock: + raise TimeLocked('The output is locked until {}'.format( + datetime.datetime.fromtimestamp(timelock).strftime("%m/%d/%Y %I:%M:%S %p"))) + + +def op_equalverify(context: ScriptContext) -> None: + """Verifies top 2 elements from stack are equal + + :param stack: the stack used when evaluating the script + :type stack: list[] + + :raises MissingStackItems: if there aren't 2 element on stack + :raises EqualVerifyFailed: items don't match + """ + if len(context.stack) < 2: + raise MissingStackItems('OP_EQUALVERIFY: need 2 elements on stack, currently {}'.format(len(context.stack))) + op_equal(context) + is_equal = context.stack.pop() + if not is_equal: + raise EqualVerifyFailed('Failed to verify if elements are equal') + + +def op_equal(context: ScriptContext) -> None: + """Verifies top 2 elements from stack are equal + + In case they are the same, we push 1 to the stack and push 0 if they are different + + :param stack: the stack used when evaluating the script + :type stack: list[] + """ + if len(context.stack) < 2: + raise MissingStackItems('OP_EQUAL: need 2 elements on stack, currently {}'.format(len(context.stack))) + elem1 = 
context.stack.pop() + elem2 = context.stack.pop() + assert isinstance(elem1, bytes) + assert isinstance(elem2, bytes) + if elem1 == elem2: + context.stack.append(1) + else: + context.stack.append(0) + context.logs.append('OP_EQUAL: failed. elements: {} {}'.format(elem1.hex(), elem2.hex())) + + +def op_checksig(context: ScriptContext) -> None: + """Verifies public key and signature match. Expects public key to be on top of stack, followed + by signature. If they match, put 1 on stack (meaning True); otherwise, push 0 (False) + + :param stack: the stack used when evaluating the script + :type stack: list[] + + :raises MissingStackItems: if there aren't 2 element on stack + :raises ScriptError: if pubkey on stack is not a compressed public key + + :return: if they don't match, return error message + :rtype: string + """ + if len(context.stack) < 2: + raise MissingStackItems('OP_CHECKSIG: need 2 elements on stack, currently {}'.format(len(context.stack))) + pubkey = context.stack.pop() + signature = context.stack.pop() + assert isinstance(pubkey, bytes) + assert isinstance(signature, bytes) + + if not is_pubkey_compressed(pubkey): + raise ScriptError('OP_CHECKSIG: pubkey is not a compressed public key') + try: + public_key = get_public_key_from_bytes_compressed(pubkey) + except ValueError as e: + # pubkey is not compressed public key + raise ScriptError('OP_CHECKSIG: pubkey is not a public key') from e + try: + public_key.verify(signature, context.extras.tx.get_sighash_all_data(), ec.ECDSA(hashes.SHA256())) + # valid, push true to stack + context.stack.append(1) + except InvalidSignature: + # invalid, push false to stack + context.stack.append(0) + context.logs.append('OP_CHECKSIG: failed') + + +def op_hash160(context: ScriptContext) -> None: + """Top stack item is hashed twice: first with SHA-256 and then with RIPEMD-160. + Result is pushed back to stack. 
+ + :param stack: the stack used when evaluating the script + :type stack: list[] + + :raises MissingStackItems: if there's no element on stack + """ + if not len(context.stack): + raise MissingStackItems('OP_HASH160: empty stack') + elem1 = context.stack.pop() + assert isinstance(elem1, bytes) + new_elem = get_hash160(elem1) + context.stack.append(new_elem) + + +def op_checkdatasig(context: ScriptContext) -> None: + """Verifies public key, signature and data match. Expects public key to be on top of stack, followed + by signature and data. If they match, put data on stack; otherwise, fail. + + :param stack: the stack used when evaluating the script + :type stack: list[] + + :raises MissingStackItems: if there aren't 3 element on stack + :raises OracleChecksigFailed: invalid signature, given data and public key + """ + if len(context.stack) < 3: + raise MissingStackItems('OP_CHECKDATASIG: need 3 elements on stack, currently {}'.format(len(context.stack))) + pubkey = context.stack.pop() + signature = context.stack.pop() + data = context.stack.pop() + assert isinstance(pubkey, bytes) + assert isinstance(signature, bytes) + assert isinstance(data, bytes) + + if not is_pubkey_compressed(pubkey): + raise ScriptError('OP_CHECKDATASIG: pubkey is not a compressed public key') + try: + public_key = get_public_key_from_bytes_compressed(pubkey) + except ValueError as e: + # pubkey is not compressed public key + raise ScriptError('OP_CHECKDATASIG: pubkey is not a public key') from e + try: + public_key.verify(signature, data, ec.ECDSA(hashes.SHA256())) + # valid, push true to stack + context.stack.append(data) + except InvalidSignature as e: + raise OracleChecksigFailed from e + + +def op_data_strequal(context: ScriptContext) -> None: + """Equivalent to an OP_GET_DATA_STR followed by an OP_EQUALVERIFY. + + Consumes three parameters from stack: . Gets the kth value + from as a string and verifies it's equal to . If so, puts + back on the stack. 
+ + :param stack: the stack used when evaluating the script + :type stack: list[] + + :raises MissingStackItems: if there aren't 3 element on stack + :raises VerifyFailed: verification failed + """ + if len(context.stack) < 3: + raise MissingStackItems('OP_DATA_STREQUAL: need 3 elements on stack, currently {}'.format(len(context.stack))) + value = context.stack.pop() + data_k = context.stack.pop() + data = context.stack.pop() + assert isinstance(value, bytes) + assert isinstance(data, bytes) + + if not isinstance(data_k, int): + raise VerifyFailed('OP_DATA_STREQUAL: value on stack should be an integer ({!r})'.format(data_k)) + + data_value = get_data_value(data_k, data) + if data_value != value: + raise VerifyFailed('OP_DATA_STREQUAL: {} x {}'.format(data_value.decode('utf-8'), value.decode('utf-8'))) + + context.stack.append(data) + + +def op_data_greaterthan(context: ScriptContext) -> None: + """Equivalent to an OP_GET_DATA_INT followed by an OP_GREATERTHAN. + + Consumes three parameters from stack: . Gets the kth value + from as an integer and verifies it's greater than . 
+ + :param stack: the stack used when evaluating the script + :type stack: list[] + + :raises MissingStackItems: if there aren't 3 element on stack + :raises VerifyFailed: verification failed + """ + if len(context.stack) < 3: + raise MissingStackItems(f'OP_DATA_GREATERTHAN: need 3 elements on stack, currently {len(context.stack)}') + value = context.stack.pop() + data_k = context.stack.pop() + data = context.stack.pop() + assert isinstance(value, bytes) + assert isinstance(data, bytes) + + if not isinstance(data_k, int): + raise VerifyFailed('OP_DATA_STREQUAL: value on stack should be an integer ({!r})'.format(data_k)) + + data_value = get_data_value(data_k, data) + try: + data_int = binary_to_int(data_value) + value_int = binary_to_int(value) + except (ValueError, struct.error) as e: + raise VerifyFailed from e + + if data_int <= value_int: + raise VerifyFailed('op_data_greaterthan: {} x {}'.format(data_int, value_int)) + + context.stack.append(data) + + +def op_data_match_interval(stack: Stack) -> None: + """Equivalent to an OP_GET_DATA_INT followed by an OP_MATCH_INTERVAL. 
+ + :param stack: the stack used when evaluating the script + :type stack: list[] + + :raises MissingStackItems: if there aren't 3 element on stack + :raises VerifyFailed: verification failed + """ + if len(stack) < 1: + raise MissingStackItems('OP_DATA_MATCH_INTERVAL: stack is empty') + + data_n_items = stack.pop() + assert isinstance(data_n_items, bytes) + # TODO test this can be transformed to integer + n_items = data_n_items[0] + + # number of items in stack that will be used + will_use = 2 * n_items + 3 # n data_points, n + 1 pubkeys, k and data + if len(stack) < will_use: + raise MissingStackItems('OP_DATA_MATCH_INTERVAL: need {} elements on stack, currently {}'.format( + will_use, len(stack))) + + items = [] + try: + for _ in range(n_items): + pubkey = stack.pop() + buf = stack.pop() + assert isinstance(pubkey, (str, bytes)) + assert isinstance(buf, bytes) + value = binary_to_int(buf) + items.append((value, pubkey)) + # one pubkey is left on stack + last_pubkey = stack.pop() + # next two items are data index and data + data_k = stack.pop() + data = stack.pop() + assert isinstance(data_k, int) + assert isinstance(data, bytes) + data_value = get_data_value(data_k, data) + data_int = binary_to_int(data_value) + except (ValueError, struct.error) as e: + raise VerifyFailed from e + + for (value_int, pubkey) in items: + if data_int > value_int: + stack.append(pubkey) + return + # if none of the values match, last pubkey on stack is winner + stack.append(last_pubkey) + + +def op_data_match_value(context: ScriptContext) -> None: + """Equivalent to an OP_GET_DATA_STR followed by an OP_MATCH_VALUE. 
+ + :param stack: the stack used when evaluating the script + :type stack: list[] + + :raises MissingStackItems: if there aren't 3 element on stack + :raises VerifyFailed: verification failed + """ + if len(context.stack) < 1: + raise MissingStackItems('OP_DATA_MATCH_VALUE: empty stack') + + data_n_items = context.stack.pop() + assert isinstance(data_n_items, bytes) + # TODO test this can be transformed to integer + n_items = data_n_items[0] + + # number of items in stack that will be used + will_use = 2 * n_items + 3 # n data_points, n + 1 keys, k and data + if len(context.stack) < will_use: + raise MissingStackItems('OP_DATA_MATCH_VALUE: need {} elements on stack, currently {}'.format( + will_use, len(context.stack))) + + items = {} + try: + for _ in range(n_items): + pubkey = context.stack.pop() + buf = context.stack.pop() + assert isinstance(pubkey, (str, bytes)) + assert isinstance(buf, bytes) + value = binary_to_int(buf) + items[value] = pubkey + except (ValueError, struct.error) as e: + raise VerifyFailed from e + + # one pubkey is left on stack + last_pubkey = context.stack.pop() + # next two items are data index and data + data_k = context.stack.pop() + data = context.stack.pop() + assert isinstance(data_k, int) + assert isinstance(data, bytes) + data_value = get_data_value(data_k, data) + data_int = binary_to_int(data_value) + winner_pubkey = items.get(data_int, last_pubkey) + assert isinstance(winner_pubkey, (str, bytes)) + context.stack.append(winner_pubkey) + + +def op_find_p2pkh(context: ScriptContext) -> None: + """Checks whether the current transaction has an output with a P2PKH script with + the given public key hash and the same amount as the input. 
+ + :param stack: the stack used when evaluating the script + :type stack: list[] + + :param tx: Transaction to be added + :type tx: :py:class:`hathor.transaction.BaseTransaction` + + :param contract_value: amount available on the nano contract (on the original output) + :type contract_type: int + + :raises MissingStackItems: if stack is empty + :raises VerifyFailed: verification failed + """ + if not len(context.stack): + raise MissingStackItems('OP_FIND_P2PKH: empty stack') + + from hathor.transaction.scripts import P2PKH + spent_tx = context.extras.spent_tx + txin = context.extras.txin + tx = context.extras.tx + contract_value = spent_tx.outputs[txin.index].value + + address = context.stack.pop() + address_b58 = get_address_b58_from_bytes(address) + for output in tx.outputs: + p2pkh_out = P2PKH.parse_script(output.script) + if p2pkh_out: + if p2pkh_out.address == address_b58 and output.value == contract_value: + context.stack.append(1) + return + # didn't find any match + raise VerifyFailed + + +def op_checkmultisig(context: ScriptContext) -> None: + """Checks if it has the minimum signatures required and if all of them are valid + + :param stack: the stack used when evaluating the script + :type stack: list[] + + :raises MissingStackItems: if stack is empty or it has less signatures than the minimum required + :raises VerifyFailed: verification failed + """ + settings = get_settings() + + if not len(context.stack): + raise MissingStackItems('OP_CHECKMULTISIG: empty stack') + + # Pop the quantity of pubkeys + pubkey_count = context.stack.pop() + + if not isinstance(pubkey_count, int): + raise InvalidStackData('OP_CHECKMULTISIG: pubkey count should be an integer') + + if pubkey_count > settings.MAX_MULTISIG_PUBKEYS: + raise InvalidStackData('OP_CHECKMULTISIG: pubkey count ({}) exceeded the limit ({})'.format( + pubkey_count, + settings.MAX_MULTISIG_PUBKEYS, + ) + ) + + if len(context.stack) < pubkey_count: + raise MissingStackItems('OP_CHECKMULTISIG: not enough 
public keys on the stack') + + # Get all pubkeys + pubkeys = [] + for _ in range(pubkey_count): + pubkey_bytes = context.stack.pop() + pubkeys.append(pubkey_bytes) + + if not len(context.stack): + raise MissingStackItems('OP_CHECKMULTISIG: less elements than should on the stack') + + # Pop the quantity of signatures required + signatures_count = context.stack.pop() + + if not isinstance(signatures_count, int): + raise InvalidStackData('OP_CHECKMULTISIG: signatures count should be an integer') + + if signatures_count > settings.MAX_MULTISIG_SIGNATURES: + raise InvalidStackData('OP_CHECKMULTISIG: signature count ({}) exceeded the limit ({})'.format( + signatures_count, + settings.MAX_MULTISIG_SIGNATURES, + ) + ) + + # Error if we don't have the minimum quantity of signatures + if len(context.stack) < signatures_count: + raise MissingStackItems('OP_CHECKMULTISIG: not enough signatures on the stack') + + # Get all signatures + signatures = [] + for _ in range(signatures_count): + signature_bytes = context.stack.pop() + signatures.append(signature_bytes) + + # For each signature we check if it's valid with one of the public keys + # Signatures must be in order (same as the public keys in the multi sig wallet) + pubkey_index = 0 + for signature in signatures: + while pubkey_index < len(pubkeys): + pubkey = pubkeys[pubkey_index] + new_stack = [signature, pubkey] + op_checksig(ScriptContext(stack=new_stack, logs=context.logs, extras=context.extras)) + result = new_stack.pop() + pubkey_index += 1 + if result == 1: + break + else: + # finished all pubkeys and did not verify all signatures + context.stack.append(0) + return + + # If all signatures are valids we push 1 + context.stack.append(1) + + +def op_integer(opcode: int, stack: Stack) -> None: + """ Appends an integer to the stack + We get the opcode comparing to all integers opcodes + + Example to append integer 4: + opcode will be equal to OP_4 (0x54) + Then we append the integer OP_4 - OP_0 = 4 + + :param opcode: the 
opcode to append to the stack + :type opcode: bytes + + :param stack: the stack used when evaluating the script + :type stack: list[] + """ + try: + stack.append(decode_opn(opcode)) + except InvalidScriptError as e: + raise ScriptError(e) from e + + +def execute_op_code(opcode: Opcode, context: ScriptContext) -> None: + """ + Execute a function opcode. + + Args: + opcode: the opcode to be executed. + context: the script context to be manipulated. + """ + context.logs.append(f'Executing function opcode {opcode.name} ({hex(opcode.value)})') + match opcode: + case Opcode.OP_DUP: op_dup(context) + case Opcode.OP_EQUAL: op_equal(context) + case Opcode.OP_EQUALVERIFY: op_equalverify(context) + case Opcode.OP_CHECKSIG: op_checksig(context) + case Opcode.OP_HASH160: op_hash160(context) + case Opcode.OP_GREATERTHAN_TIMESTAMP: op_greaterthan_timestamp(context) + case Opcode.OP_CHECKMULTISIG: op_checkmultisig(context) + case Opcode.OP_DATA_STREQUAL: op_data_strequal(context) + case Opcode.OP_DATA_GREATERTHAN: op_data_greaterthan(context) + case Opcode.OP_DATA_MATCH_VALUE: op_data_match_value(context) + case Opcode.OP_CHECKDATASIG: op_checkdatasig(context) + case Opcode.OP_FIND_P2PKH: op_find_p2pkh(context) + case _: raise ScriptError(f'unknown opcode: {opcode}') diff --git a/hathor/transaction/scripts/p2pkh.py b/hathor/transaction/scripts/p2pkh.py new file mode 100644 index 000000000..52812680c --- /dev/null +++ b/hathor/transaction/scripts/p2pkh.py @@ -0,0 +1,128 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import struct +from typing import Any, Optional + +from hathor.crypto.util import decode_address, get_address_b58_from_public_key_hash +from hathor.transaction.scripts.base_script import BaseScript +from hathor.transaction.scripts.construct import get_pushdata, re_compile +from hathor.transaction.scripts.hathor_script import HathorScript +from hathor.transaction.scripts.opcode import Opcode + + +class P2PKH(BaseScript): + re_match = re_compile('^(?:(DATA_4) OP_GREATERTHAN_TIMESTAMP)? ' + 'OP_DUP OP_HASH160 (DATA_20) OP_EQUALVERIFY OP_CHECKSIG$') + + def __init__(self, address: str, timelock: Optional[int] = None) -> None: + """This class represents the pay to public hash key script. It enables the person + who has the corresponding private key of the address to spend the tokens. + + This script validates the signature and public key on the corresponding input + data. The public key is first checked against the script address and then the + signature is verified, which means the sender owns the corresponding private key. 
+ + Output script and the corresponding input data are usually represented like: + input data: OP_DUP OP_HASH160 OP_EQUALVERIFY OP_CHECKSIG + output script: + + :param address: address to send tokens + :type address: string(base58) + + :param timelock: timestamp until when it's locked + :type timelock: int + """ + self.address = address + self.timelock = timelock + + def to_human_readable(self) -> dict[str, Any]: + ret: dict[str, Any] = {} + ret['type'] = self.get_type() + ret['address'] = self.address + ret['timelock'] = self.timelock + return ret + + def get_type(self) -> str: + return 'P2PKH' + + def get_script(self) -> bytes: + return P2PKH.create_output_script(decode_address(self.address), self.timelock) + + def get_address(self) -> Optional[str]: + return self.address + + def get_timelock(self) -> Optional[int]: + return self.timelock + + @classmethod + def create_output_script(cls, address: bytes, timelock: Optional[Any] = None) -> bytes: + """ + :param address: address to send tokens + :type address: bytes + + :param timelock: timestamp until when the output is locked + :type timelock: bytes + + :rtype: bytes + """ + assert len(address) == 25 + public_key_hash = address[1:-4] + s = HathorScript() + if timelock: + s.pushData(timelock) + s.addOpcode(Opcode.OP_GREATERTHAN_TIMESTAMP) + s.addOpcode(Opcode.OP_DUP) + s.addOpcode(Opcode.OP_HASH160) + s.pushData(public_key_hash) + s.addOpcode(Opcode.OP_EQUALVERIFY) + s.addOpcode(Opcode.OP_CHECKSIG) + return s.data + + @classmethod + def create_input_data(cls, public_key_bytes: bytes, signature: bytes) -> bytes: + """ + :param private_key: key corresponding to the address we want to spend tokens from + :type private_key: :py:class:`cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey` + + :rtype: bytes + """ + s = HathorScript() + s.pushData(signature) + s.pushData(public_key_bytes) + return s.data + + @classmethod + def parse_script(cls, script: bytes) -> Optional['P2PKH']: + """Checks if the given 
script is of type p2pkh. If it is, returns the P2PKH object. + Otherwise, returns None. + + :param script: script to check + :type script: bytes + + :rtype: :py:class:`hathor.transaction.scripts.P2PKH` or None + """ + match = cls.re_match.search(script) + if match: + groups = match.groups() + timelock = None + pushdata_timelock = groups[0] + if pushdata_timelock: + timelock_bytes = pushdata_timelock[1:] + timelock = struct.unpack('!I', timelock_bytes)[0] + pushdata_address = groups[1] + public_key_hash = get_pushdata(pushdata_address) + address_b58 = get_address_b58_from_public_key_hash(public_key_hash) + return cls(address_b58, timelock) + return None diff --git a/hathor/transaction/scripts/script_context.py b/hathor/transaction/scripts/script_context.py new file mode 100644 index 000000000..925a881f1 --- /dev/null +++ b/hathor/transaction/scripts/script_context.py @@ -0,0 +1,25 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.transaction.scripts.execute import ScriptExtras, Stack + + +class ScriptContext: + """A context to be manipulated during script execution. 
A separate instance must be used for each script.""" + __slots__ = ('stack', 'logs', 'extras') + + def __init__(self, *, stack: Stack, logs: list[str], extras: ScriptExtras) -> None: + self.stack = stack + self.logs = logs + self.extras = extras diff --git a/hathor/transaction/storage/cache_storage.py b/hathor/transaction/storage/cache_storage.py index 0cc41d359..8a1937a03 100644 --- a/hathor/transaction/storage/cache_storage.py +++ b/hathor/transaction/storage/cache_storage.py @@ -18,11 +18,11 @@ from twisted.internet import threads from hathor.indexes import IndexesManager +from hathor.reactor import ReactorProtocol as Reactor from hathor.transaction import BaseTransaction from hathor.transaction.storage.migrations import MigrationState from hathor.transaction.storage.transaction_storage import BaseTransactionStorage from hathor.transaction.storage.tx_allow_scope import TxAllowScope -from hathor.util import Reactor class TransactionCacheStorage(BaseTransactionStorage): diff --git a/hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata.py b/hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata.py new file mode 100644 index 000000000..e231fdf46 --- /dev/null +++ b/hathor/transaction/storage/migrations/add_feature_activation_bit_counts_metadata.py @@ -0,0 +1,41 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import TYPE_CHECKING + +from structlog import get_logger + +from hathor.transaction.storage.migrations import BaseMigration +from hathor.util import progress + +if TYPE_CHECKING: + from hathor.transaction.storage import TransactionStorage + +logger = get_logger() + + +class Migration(BaseMigration): + def skip_empty_db(self) -> bool: + return True + + def get_db_name(self) -> str: + return 'add_feature_activation_bit_counts_metadata' + + def run(self, storage: 'TransactionStorage') -> None: + log = logger.new() + topological_iterator = storage.topological_iterator() + + for vertex in progress(topological_iterator, log=log, total=None): + if vertex.is_block: + vertex.update_initial_metadata() diff --git a/hathor/transaction/storage/migrations/remove_first_nop_features.py b/hathor/transaction/storage/migrations/remove_first_nop_features.py new file mode 100644 index 000000000..c245e8d22 --- /dev/null +++ b/hathor/transaction/storage/migrations/remove_first_nop_features.py @@ -0,0 +1,58 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import TYPE_CHECKING + +from structlog import get_logger + +from hathor.conf.get_settings import get_settings +from hathor.transaction.storage.migrations import BaseMigration +from hathor.util import progress + +if TYPE_CHECKING: + from hathor.transaction.storage import TransactionStorage + +logger = get_logger() + + +class Migration(BaseMigration): + def skip_empty_db(self) -> bool: + return True + + def get_db_name(self) -> str: + return 'remove_first_nop_features' + + def run(self, storage: 'TransactionStorage') -> None: + """ + This migration clears the Feature Activation metadata related to the first Phased Testing on testnet. + """ + settings = get_settings() + log = logger.new() + + if settings.NETWORK_NAME != 'testnet-golf': + # If it's not testnet, we don't have to clear anything. + log.info('Skipping testnet-only migration.') + return + + topological_iterator = storage.topological_iterator() + + for vertex in progress(topological_iterator, log=log, total=None): + if vertex.is_block: + meta = vertex.get_metadata() + assert meta.height is not None + # This is the start_height of the **second** Phased Testing, so we clear anything before it. 
+ if meta.height < 3_386_880: + meta.feature_states = None + + storage.save_transaction(vertex, only_metadata=True) diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index 727d4e856..a4358c6c4 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -35,7 +35,13 @@ TransactionIsNotABlock, TransactionNotInAllowedScopeError, ) -from hathor.transaction.storage.migrations import BaseMigration, MigrationState, add_min_height_metadata +from hathor.transaction.storage.migrations import ( + BaseMigration, + MigrationState, + add_feature_activation_bit_counts_metadata, + add_min_height_metadata, + remove_first_nop_features, +) from hathor.transaction.storage.tx_allow_scope import TxAllowScope, tx_allow_context from hathor.transaction.transaction import Transaction from hathor.transaction.transaction_metadata import TransactionMetadata @@ -81,6 +87,8 @@ class TransactionStorage(ABC): # history of migrations that have to be applied in the order defined here _migration_factories: list[type[BaseMigration]] = [ add_min_height_metadata.Migration, + add_feature_activation_bit_counts_metadata.Migration, + remove_first_nop_features.Migration ] _migrations: list[BaseMigration] diff --git a/hathor/transaction/token_creation_tx.py b/hathor/transaction/token_creation_tx.py index c2e63f9f2..08156ce90 100644 --- a/hathor/transaction/token_creation_tx.py +++ b/hathor/transaction/token_creation_tx.py @@ -15,11 +15,13 @@ from struct import error as StructError, pack from typing import Any, Optional +from typing_extensions import override + from hathor.transaction.base_transaction import TxInput, TxOutput, TxVersion -from hathor.transaction.exceptions import InvalidToken, TransactionDataError from hathor.transaction.storage import TransactionStorage # noqa: F401 from hathor.transaction.transaction import TokenInfo, Transaction -from hathor.transaction.util import 
VerboseCallback, clean_token_string, int_to_bytes, unpack, unpack_len +from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len +from hathor.types import TokenUid # Signal bits (B), version (B), inputs len (B), outputs len (B) _FUNDS_FORMAT_STRING = '!BBBB' @@ -37,7 +39,7 @@ def __init__(self, nonce: int = 0, timestamp: Optional[int] = None, signal_bits: int = 0, - version: int = TxVersion.TOKEN_CREATION_TRANSACTION, + version: TxVersion = TxVersion.TOKEN_CREATION_TRANSACTION, weight: float = 0, inputs: Optional[list[TxInput]] = None, outputs: Optional[list[TxOutput]] = None, @@ -66,12 +68,6 @@ def update_hash(self) -> None: assert self.hash is not None self.tokens = [self.hash] - def resolve(self, update_time: bool = True) -> bool: - ret = super().resolve(update_time) - assert self.hash is not None - self.tokens = [self.hash] - return ret - def get_funds_fields_from_struct(self, buf: bytes, *, verbose: VerboseCallback = None) -> bytes: """ Gets all funds fields for a transaction from a buffer. 
@@ -220,44 +216,15 @@ def to_json_extended(self) -> dict[str, Any]: json['tokens'] = [] return json - def verify_sum(self) -> None: - """ Besides all checks made on regular transactions, a few extra ones are made: - - only HTR tokens on the inputs; - - new tokens are actually being minted; - - :raises InvalidToken: when there's an error in token operations - :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt - """ - token_dict = self.get_token_info_from_inputs() + @override + def _get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]: + token_dict = super()._get_token_info_from_inputs() # we add the created token's info to token_dict, as the creation tx allows for mint/melt assert self.hash is not None token_dict[self.hash] = TokenInfo(0, True, True) - self.update_token_info_from_outputs(token_dict) - - # make sure tokens are being minted - token_info = token_dict[self.hash] - if token_info.amount <= 0: - raise InvalidToken('Token creation transaction must mint new tokens') - - self.check_authorities_and_deposit(token_dict) - - def verify_token_info(self) -> None: - """ Validates token info - """ - name_len = len(self.token_name) - symbol_len = len(self.token_symbol) - if name_len == 0 or name_len > self._settings.MAX_LENGTH_TOKEN_NAME: - raise TransactionDataError('Invalid token name length ({})'.format(name_len)) - if symbol_len == 0 or symbol_len > self._settings.MAX_LENGTH_TOKEN_SYMBOL: - raise TransactionDataError('Invalid token symbol length ({})'.format(symbol_len)) - - # Can't create token with hathor name or symbol - if clean_token_string(self.token_name) == clean_token_string(self._settings.HATHOR_TOKEN_NAME): - raise TransactionDataError('Invalid token name ({})'.format(self.token_name)) - if clean_token_string(self.token_symbol) == clean_token_string(self._settings.HATHOR_TOKEN_SYMBOL): - raise TransactionDataError('Invalid token symbol ({})'.format(self.token_symbol)) + return token_dict def 
decode_string_utf8(encoded: bytes, key: str) -> str: diff --git a/hathor/transaction/transaction.py b/hathor/transaction/transaction.py index 626010da2..37967461e 100644 --- a/hathor/transaction/transaction.py +++ b/hathor/transaction/transaction.py @@ -17,30 +17,13 @@ from struct import pack from typing import TYPE_CHECKING, Any, Iterator, NamedTuple, Optional -from hathor import daa from hathor.checkpoint import Checkpoint from hathor.exception import InvalidNewTransaction from hathor.profiler import get_cpu_profiler from hathor.transaction import BaseTransaction, Block, TxInput, TxOutput, TxVersion from hathor.transaction.base_transaction import TX_HASH_SIZE -from hathor.transaction.exceptions import ( - ConflictingInputs, - DuplicatedParents, - IncorrectParents, - InexistentInput, - InputOutputMismatch, - InvalidInputData, - InvalidInputDataSize, - InvalidToken, - NoInputError, - RewardLocked, - ScriptError, - TimestampError, - TooManyInputs, - TooManySigOps, - WeightError, -) -from hathor.transaction.util import VerboseCallback, get_deposit_amount, get_withdraw_amount, unpack, unpack_len +from hathor.transaction.exceptions import InvalidToken +from hathor.transaction.util import VerboseCallback, unpack, unpack_len from hathor.types import TokenUid, VertexId from hathor.util import not_none @@ -75,7 +58,7 @@ def __init__(self, nonce: int = 0, timestamp: Optional[int] = None, signal_bits: int = 0, - version: int = TxVersion.REGULAR_TRANSACTION, + version: TxVersion = TxVersion.REGULAR_TRANSACTION, weight: float = 0, inputs: Optional[list[TxInput]] = None, outputs: Optional[list[TxOutput]] = None, @@ -296,90 +279,16 @@ def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None: raise InvalidNewTransaction(f'Invalid new transaction {self.hash_hex}: expected to reach a checkpoint but ' 'none of its children is checkpoint-valid') - def verify_parents_basic(self) -> None: - """Verify number and non-duplicity of parents.""" - assert self.storage is not None - 
- # check if parents are duplicated - parents_set = set(self.parents) - if len(self.parents) > len(parents_set): - raise DuplicatedParents('Tx has duplicated parents: {}', [tx_hash.hex() for tx_hash in self.parents]) - - if len(self.parents) != 2: - raise IncorrectParents(f'wrong number of parents (tx type): {len(self.parents)}, expecting 2') - - def verify_weight(self) -> None: - """Validate minimum tx difficulty.""" - min_tx_weight = daa.minimum_tx_weight(self) - max_tx_weight = min_tx_weight + self._settings.MAX_TX_WEIGHT_DIFF - if self.weight < min_tx_weight - self._settings.WEIGHT_TOL: - raise WeightError(f'Invalid new tx {self.hash_hex}: weight ({self.weight}) is ' - f'smaller than the minimum weight ({min_tx_weight})') - elif min_tx_weight > self._settings.MAX_TX_WEIGHT_DIFF_ACTIVATION and self.weight > max_tx_weight: - raise WeightError(f'Invalid new tx {self.hash_hex}: weight ({self.weight}) is ' - f'greater than the maximum allowed ({max_tx_weight})') - - def verify_unsigned_skip_pow(self) -> None: - """ Same as .verify but skipping pow and signature verification.""" - self.verify_number_of_inputs() - self.verify_number_of_outputs() - self.verify_outputs() - self.verify_sigops_output() - self.verify_sigops_input() - self.verify_inputs(skip_script=True) # need to run verify_inputs first to check if all inputs exist - self.verify_parents() - self.verify_sum() - - def verify_without_storage(self) -> None: - """ Run all verifications that do not need a storage. 
+ def get_complete_token_info(self) -> dict[TokenUid, TokenInfo]: """ - self.verify_pow() - self.verify_number_of_inputs() - self.verify_outputs() - self.verify_sigops_output() - - def verify_number_of_inputs(self) -> None: - """Verify number of inputs is in a valid range""" - if len(self.inputs) > self._settings.MAX_NUM_INPUTS: - raise TooManyInputs('Maximum number of inputs exceeded') - - if len(self.inputs) == 0: - if not self.is_genesis: - raise NoInputError('Transaction must have at least one input') - - def verify_sigops_input(self) -> None: - """ Count sig operations on all inputs and verify that the total sum is below the limit + Get a complete token info dict, including data from both inputs and outputs. """ - from hathor.transaction.scripts import get_sigops_count - from hathor.transaction.storage.exceptions import TransactionDoesNotExist - n_txops = 0 - for tx_input in self.inputs: - try: - spent_tx = self.get_spent_tx(tx_input) - except TransactionDoesNotExist: - raise InexistentInput('Input tx does not exist: {}'.format(tx_input.tx_id.hex())) - assert spent_tx.hash is not None - if tx_input.index >= len(spent_tx.outputs): - raise InexistentInput('Output spent by this input does not exist: {} index {}'.format( - tx_input.tx_id.hex(), tx_input.index)) - n_txops += get_sigops_count(tx_input.data, spent_tx.outputs[tx_input.index].script) - - if n_txops > self._settings.MAX_TX_SIGOPS_INPUT: - raise TooManySigOps( - 'TX[{}]: Max number of sigops for inputs exceeded ({})'.format(self.hash_hex, n_txops)) - - def verify_outputs(self) -> None: - """Verify outputs reference an existing token uid in the tokens list - - :raises InvalidToken: output references non existent token uid - """ - super().verify_outputs() - for output in self.outputs: - # check index is valid - if output.get_token_index() > len(self.tokens): - raise InvalidToken('token uid index not available: index {}'.format(output.get_token_index())) + token_dict = self._get_token_info_from_inputs() + 
self._update_token_info_from_outputs(token_dict=token_dict) + + return token_dict - def get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]: + def _get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]: """Sum up all tokens present in the inputs and their properties (amount, can_mint, can_melt) """ token_dict: dict[TokenUid, TokenInfo] = {} @@ -406,7 +315,7 @@ def get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]: return token_dict - def update_token_info_from_outputs(self, token_dict: dict[TokenUid, TokenInfo]) -> None: + def _update_token_info_from_outputs(self, *, token_dict: dict[TokenUid, TokenInfo]) -> None: """Iterate over the outputs and add values to token info dict. Updates the dict in-place. Also, checks if no token has authorities on the outputs not present on the inputs @@ -437,61 +346,6 @@ def update_token_info_from_outputs(self, token_dict: dict[TokenUid, TokenInfo]) sum_tokens = token_info.amount + tx_output.value token_dict[token_uid] = TokenInfo(sum_tokens, token_info.can_mint, token_info.can_melt) - def check_authorities_and_deposit(self, token_dict: dict[TokenUid, TokenInfo]) -> None: - """Verify that the sum of outputs is equal of the sum of inputs, for each token. If sum of inputs - and outputs is not 0, make sure inputs have mint/melt authority. 
- - token_dict sums up all tokens present in the tx and their properties (amount, can_mint, can_melt) - amount = outputs - inputs, thus: - - amount < 0 when melting - - amount > 0 when minting - - :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt - """ - withdraw = 0 - deposit = 0 - for token_uid, token_info in token_dict.items(): - if token_uid == self._settings.HATHOR_TOKEN_UID: - continue - - if token_info.amount == 0: - # that's the usual behavior, nothing to do - pass - elif token_info.amount < 0: - # tokens have been melted - if not token_info.can_melt: - raise InputOutputMismatch('{} {} tokens melted, but there is no melt authority input'.format( - token_info.amount, token_uid.hex())) - withdraw += get_withdraw_amount(token_info.amount) - else: - # tokens have been minted - if not token_info.can_mint: - raise InputOutputMismatch('{} {} tokens minted, but there is no mint authority input'.format( - (-1) * token_info.amount, token_uid.hex())) - deposit += get_deposit_amount(token_info.amount) - - # check whether the deposit/withdraw amount is correct - htr_expected_amount = withdraw - deposit - htr_info = token_dict[self._settings.HATHOR_TOKEN_UID] - if htr_info.amount != htr_expected_amount: - raise InputOutputMismatch('HTR balance is different than expected. (amount={}, expected={})'.format( - htr_info.amount, - htr_expected_amount, - )) - - def verify_sum(self) -> None: - """Verify that the sum of outputs is equal of the sum of inputs, for each token. - - If there are authority UTXOs involved, tokens can be minted or melted, so the above rule may - not be respected. 
- - :raises InvalidToken: when there's an error in token operations - :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt - """ - token_dict = self.get_token_info_from_inputs() - self.update_token_info_from_outputs(token_dict) - self.check_authorities_and_deposit(token_dict) - def iter_spent_rewards(self) -> Iterator[Block]: """Iterate over all the rewards being spent, assumes tx has been verified.""" for input_tx in self.inputs: @@ -500,51 +354,6 @@ def iter_spent_rewards(self) -> Iterator[Block]: assert isinstance(spent_tx, Block) yield spent_tx - def verify_inputs(self, *, skip_script: bool = False) -> None: - """Verify inputs signatures and ownership and all inputs actually exist""" - from hathor.transaction.storage.exceptions import TransactionDoesNotExist - - spent_outputs: set[tuple[VertexId, int]] = set() - for input_tx in self.inputs: - if len(input_tx.data) > self._settings.MAX_INPUT_DATA_SIZE: - raise InvalidInputDataSize('size: {} and max-size: {}'.format( - len(input_tx.data), self._settings.MAX_INPUT_DATA_SIZE - )) - - try: - spent_tx = self.get_spent_tx(input_tx) - assert spent_tx.hash is not None - if input_tx.index >= len(spent_tx.outputs): - raise InexistentInput('Output spent by this input does not exist: {} index {}'.format( - input_tx.tx_id.hex(), input_tx.index)) - except TransactionDoesNotExist: - raise InexistentInput('Input tx does not exist: {}'.format(input_tx.tx_id.hex())) - - if self.timestamp <= spent_tx.timestamp: - raise TimestampError('tx={} timestamp={}, spent_tx={} timestamp={}'.format( - self.hash.hex() if self.hash else None, - self.timestamp, - spent_tx.hash.hex(), - spent_tx.timestamp, - )) - - if not skip_script: - self.verify_script(input_tx, spent_tx) - - # check if any other input in this tx is spending the same output - key = (input_tx.tx_id, input_tx.index) - if key in spent_outputs: - raise ConflictingInputs('tx {} inputs spend the same output: {} index {}'.format( - 
self.hash_hex, input_tx.tx_id.hex(), input_tx.index)) - spent_outputs.add(key) - - def verify_reward_locked(self) -> None: - """Will raise `RewardLocked` if any reward is spent before the best block height is enough, considering only - the block rewards spent by this tx itself, and not the inherited `min_height`.""" - info = self.get_spent_reward_locked_info() - if info is not None: - raise RewardLocked(f'Reward {info.block_hash.hex()} still needs {info.blocks_needed} to be unlocked.') - def is_spent_reward_locked(self) -> bool: """ Check whether any spent reward is currently locked, considering only the block rewards spent by this tx itself, and not the inherited `min_height`""" @@ -578,17 +387,6 @@ def _spent_reward_needed_height(self, block: Block) -> int: needed_height = self._settings.REWARD_SPEND_MIN_BLOCKS - spend_blocks return max(needed_height, 0) - def verify_script(self, input_tx: TxInput, spent_tx: BaseTransaction) -> None: - """ - :type input_tx: TxInput - :type spent_tx: Transaction - """ - from hathor.transaction.scripts import script_eval - try: - script_eval(self, input_tx, spent_tx) - except ScriptError as e: - raise InvalidInputData(e) from e - def is_double_spending(self) -> bool: """ Iterate through inputs to check if they were already spent Used to prevent users from sending double spending transactions to the network diff --git a/hathor/transaction/transaction_metadata.py b/hathor/transaction/transaction_metadata.py index c7bbbaf72..c223d928f 100644 --- a/hathor/transaction/transaction_metadata.py +++ b/hathor/transaction/transaction_metadata.py @@ -53,7 +53,8 @@ class TransactionMetadata: feature_activation_bit_counts: Optional[list[int]] # A dict of features in the feature activation process and their respective state. Must only be used by Blocks, - # is None otherwise. + # is None otherwise. This is only used for caching, so it can be safely cleared up, as it would be recalculated + # when necessary. 
feature_states: Optional[dict[Feature, FeatureState]] = None # It must be a weakref. _tx_ref: Optional['ReferenceType[BaseTransaction]'] diff --git a/hathor/util.py b/hathor/util.py index 3e1b910db..20fd9e995 100644 --- a/hathor/util.py +++ b/hathor/util.py @@ -30,8 +30,6 @@ import hathor from hathor.conf.get_settings import get_settings -from hathor.reactor.reactor import reactor as hathor_reactor -from hathor.reactor.reactor_protocol import ReactorProtocol from hathor.types import TokenUid if TYPE_CHECKING: @@ -39,11 +37,6 @@ from hathor.transaction.base_transaction import BaseTransaction -# TODO: Those reexports are kept for retro-compatibility, but users could import them directly and then we can remove -# them from this file. -Reactor = ReactorProtocol -reactor = hathor_reactor - logger = get_logger() T = TypeVar('T') diff --git a/hathor/utils/named_tuple.py b/hathor/utils/named_tuple.py index e8064b7e4..ab29fdf37 100644 --- a/hathor/utils/named_tuple.py +++ b/hathor/utils/named_tuple.py @@ -46,8 +46,8 @@ def validated_named_tuple_from_dict( # This intermediate step shouldn't be necessary, but for some reason pydantic.create_model_from_namedtuple # doesn't support default attribute values, so we do this to add them - all_attributes = named_tuple_type(**attributes_dict) + all_attributes = named_tuple_type(**attributes_dict) # type: ignore[call-overload] validated_attributes = model(**all_attributes._asdict()) validated_attributes_dict = {k: v for k, v in validated_attributes} - return named_tuple_type(**validated_attributes_dict) + return named_tuple_type(**validated_attributes_dict) # type: ignore[call-overload] diff --git a/hathor/verification/block_verifier.py b/hathor/verification/block_verifier.py index dd8903f72..d919c6bd2 100644 --- a/hathor/verification/block_verifier.py +++ b/hathor/verification/block_verifier.py @@ -12,39 +12,90 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from hathor.profiler import get_cpu_profiler +from hathor.conf.settings import HathorSettings +from hathor.daa import DifficultyAdjustmentAlgorithm +from hathor.feature_activation.feature_service import BlockIsMissingSignal, BlockIsSignaling, FeatureService from hathor.transaction import Block -from hathor.verification.vertex_verifier import VertexVerifier - -cpu = get_cpu_profiler() - - -class BlockVerifier(VertexVerifier): - __slots__ = () - - def verify_basic(self, block: Block, *, skip_block_weight_verification: bool = False) -> None: - """Partially run validations, the ones that need parents/inputs are skipped.""" - if not skip_block_weight_verification: - block.verify_weight() - block.verify_reward() - - @cpu.profiler(key=lambda _, block: 'block-verify!{}'.format(block.hash.hex())) - def verify(self, block: Block) -> None: - """ - (1) confirms at least two pending transactions and references last block - (2) solves the pow with the correct weight (done in HathorManager) - (3) creates the correct amount of tokens in the output (done in HathorManager) - (4) all parents must exist and have timestamp smaller than ours - (5) data field must contain at most BLOCK_DATA_MAX_SIZE bytes - """ - # TODO Should we validate a limit of outputs? 
- if block.is_genesis: - # TODO do genesis validation +from hathor.transaction.exceptions import ( + BlockMustSignalError, + BlockWithInputs, + BlockWithTokensError, + InvalidBlockReward, + RewardLocked, + TransactionDataError, + WeightError, +) + + +class BlockVerifier: + __slots__ = ('_settings', '_daa', '_feature_service') + + def __init__( + self, + *, + settings: HathorSettings, + daa: DifficultyAdjustmentAlgorithm, + feature_service: FeatureService | None = None + ) -> None: + self._settings = settings + self._daa = daa + self._feature_service = feature_service + + def verify_height(self, block: Block) -> None: + """Validate that the block height is enough to confirm all transactions being confirmed.""" + meta = block.get_metadata() + assert meta.height is not None + assert meta.min_height is not None + if meta.height < meta.min_height: + raise RewardLocked(f'Block needs {meta.min_height} height but has {meta.height}') + + def verify_weight(self, block: Block) -> None: + """Validate minimum block difficulty.""" + min_block_weight = self._daa.calculate_block_difficulty(block) + if block.weight < min_block_weight - self._settings.WEIGHT_TOL: + raise WeightError(f'Invalid new block {block.hash_hex}: weight ({block.weight}) is ' + f'smaller than the minimum weight ({min_block_weight})') + + def verify_reward(self, block: Block) -> None: + """Validate reward amount.""" + parent_block = block.get_block_parent() + tokens_issued_per_block = self._daa.get_tokens_issued_per_block(parent_block.get_height() + 1) + if block.sum_outputs != tokens_issued_per_block: + raise InvalidBlockReward( + f'Invalid number of issued tokens tag=invalid_issued_tokens tx.hash={block.hash_hex} ' + f'issued={block.sum_outputs} allowed={tokens_issued_per_block}' + ) + + def verify_no_inputs(self, block: Block) -> None: + inputs = getattr(block, 'inputs', None) + if inputs: + raise BlockWithInputs('number of inputs {}'.format(len(inputs))) + + def verify_output_token_indexes(self, block: 
Block) -> None: + for output in block.outputs: + if output.get_token_index() > 0: + raise BlockWithTokensError('in output: {}'.format(output.to_human_readable())) + + def verify_data(self, block: Block) -> None: + if len(block.data) > self._settings.BLOCK_DATA_MAX_SIZE: + raise TransactionDataError('block data has {} bytes'.format(len(block.data))) + + def verify_mandatory_signaling(self, block: Block) -> None: + """Verify whether this block is missing mandatory signaling for any feature.""" + if not self._settings.FEATURE_ACTIVATION.enable_usage: return - block.verify_without_storage() + assert self._feature_service is not None - # (1) and (4) - block.verify_parents() + signaling_state = self._feature_service.is_signaling_mandatory_features(block) - block.verify_height() + match signaling_state: + case BlockIsSignaling(): + return + case BlockIsMissingSignal(feature): + raise BlockMustSignalError( + f"Block must signal support for feature '{feature.value}' during MUST_SIGNAL phase." + ) + case _: + # TODO: This will be changed to assert_never() so mypy can check it. + raise NotImplementedError diff --git a/hathor/verification/merge_mined_block_verifier.py b/hathor/verification/merge_mined_block_verifier.py index efbfc4c07..9314fbb2a 100644 --- a/hathor/verification/merge_mined_block_verifier.py +++ b/hathor/verification/merge_mined_block_verifier.py @@ -12,8 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -from hathor.verification.block_verifier import BlockVerifier +from hathor.transaction import MergeMinedBlock -class MergeMinedBlockVerifier(BlockVerifier): +class MergeMinedBlockVerifier: __slots__ = () + + def verify_aux_pow(self, block: MergeMinedBlock) -> None: + """ Verify auxiliary proof-of-work (for merged mining). 
+ """ + assert block.aux_pow is not None + block.aux_pow.verify(block.get_base_hash()) diff --git a/hathor/verification/token_creation_transaction_verifier.py b/hathor/verification/token_creation_transaction_verifier.py index fee1bec0e..66d96f111 100644 --- a/hathor/verification/token_creation_transaction_verifier.py +++ b/hathor/verification/token_creation_transaction_verifier.py @@ -12,17 +12,46 @@ # See the License for the specific language governing permissions and # limitations under the License. +from hathor.conf.settings import HathorSettings +from hathor.transaction.exceptions import InvalidToken, TransactionDataError from hathor.transaction.token_creation_tx import TokenCreationTransaction -from hathor.verification.transaction_verifier import TransactionVerifier +from hathor.transaction.transaction import TokenInfo +from hathor.transaction.util import clean_token_string +from hathor.types import TokenUid +from hathor.util import not_none -class TokenCreationTransactionVerifier(TransactionVerifier): - __slots__ = () +class TokenCreationTransactionVerifier: + __slots__ = ('_settings',) - def verify(self, tx: TokenCreationTransaction, *, reject_locked_reward: bool = True) -> None: - """ Run all validations as regular transactions plus validation on token info. 
+ def __init__(self, *, settings: HathorSettings) -> None: + self._settings = settings - We also overload verify_sum to make some different checks + def verify_minted_tokens(self, tx: TokenCreationTransaction, token_dict: dict[TokenUid, TokenInfo]) -> None: + """ Besides all checks made on regular transactions, a few extra ones are made: + - only HTR tokens on the inputs; + - new tokens are actually being minted; + + :raises InvalidToken: when there's an error in token operations + :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt + """ + # make sure tokens are being minted + token_info = token_dict[not_none(tx.hash)] + if token_info.amount <= 0: + raise InvalidToken('Token creation transaction must mint new tokens') + + def verify_token_info(self, tx: TokenCreationTransaction) -> None: + """ Validates token info """ - super().verify(tx, reject_locked_reward=reject_locked_reward) - tx.verify_token_info() + name_len = len(tx.token_name) + symbol_len = len(tx.token_symbol) + if name_len == 0 or name_len > self._settings.MAX_LENGTH_TOKEN_NAME: + raise TransactionDataError('Invalid token name length ({})'.format(name_len)) + if symbol_len == 0 or symbol_len > self._settings.MAX_LENGTH_TOKEN_SYMBOL: + raise TransactionDataError('Invalid token symbol length ({})'.format(symbol_len)) + + # Can't create token with hathor name or symbol + if clean_token_string(tx.token_name) == clean_token_string(self._settings.HATHOR_TOKEN_NAME): + raise TransactionDataError('Invalid token name ({})'.format(tx.token_name)) + if clean_token_string(tx.token_symbol) == clean_token_string(self._settings.HATHOR_TOKEN_SYMBOL): + raise TransactionDataError('Invalid token symbol ({})'.format(tx.token_symbol)) diff --git a/hathor/verification/transaction_verifier.py b/hathor/verification/transaction_verifier.py index 8c3711524..630c82147 100644 --- a/hathor/verification/transaction_verifier.py +++ b/hathor/verification/transaction_verifier.py @@ -12,45 
+12,199 @@ # See the License for the specific language governing permissions and # limitations under the License. +from hathor.conf.settings import HathorSettings +from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.profiler import get_cpu_profiler -from hathor.transaction import Transaction -from hathor.verification.vertex_verifier import VertexVerifier +from hathor.transaction import BaseTransaction, Transaction, TxInput +from hathor.transaction.exceptions import ( + ConflictingInputs, + DuplicatedParents, + IncorrectParents, + InexistentInput, + InputOutputMismatch, + InvalidInputData, + InvalidInputDataSize, + InvalidToken, + NoInputError, + RewardLocked, + ScriptError, + TimestampError, + TooManyInputs, + TooManySigOps, + WeightError, +) +from hathor.transaction.transaction import TokenInfo +from hathor.transaction.util import get_deposit_amount, get_withdraw_amount +from hathor.types import TokenUid, VertexId cpu = get_cpu_profiler() -class TransactionVerifier(VertexVerifier): - __slots__ = () - - def verify_basic(self, tx: Transaction) -> None: - """Partially run validations, the ones that need parents/inputs are skipped.""" - if tx.is_genesis: - # TODO do genesis validation? 
- return - tx.verify_parents_basic() - tx.verify_weight() - tx.verify_without_storage() - - @cpu.profiler(key=lambda _, tx: 'tx-verify!{}'.format(tx.hash.hex())) - def verify(self, tx: Transaction, *, reject_locked_reward: bool = True) -> None: - """ Common verification for all transactions: - (i) number of inputs is at most 256 - (ii) number of outputs is at most 256 - (iii) confirms at least two pending transactions - (iv) solves the pow (we verify weight is correct in HathorManager) - (v) validates signature of inputs - (vi) validates public key and output (of the inputs) addresses - (vii) validate that both parents are valid - (viii) validate input's timestamps - (ix) validate inputs and outputs sum +class TransactionVerifier: + __slots__ = ('_settings', '_daa') + + def __init__(self, *, settings: HathorSettings, daa: DifficultyAdjustmentAlgorithm) -> None: + self._settings = settings + self._daa = daa + + def verify_parents_basic(self, tx: Transaction) -> None: + """Verify number and non-duplicity of parents.""" + assert tx.storage is not None + + # check if parents are duplicated + parents_set = set(tx.parents) + if len(tx.parents) > len(parents_set): + raise DuplicatedParents('Tx has duplicated parents: {}', [tx_hash.hex() for tx_hash in tx.parents]) + + if len(tx.parents) != 2: + raise IncorrectParents(f'wrong number of parents (tx type): {len(tx.parents)}, expecting 2') + + def verify_weight(self, tx: Transaction) -> None: + """Validate minimum tx difficulty.""" + min_tx_weight = self._daa.minimum_tx_weight(tx) + max_tx_weight = min_tx_weight + self._settings.MAX_TX_WEIGHT_DIFF + if tx.weight < min_tx_weight - self._settings.WEIGHT_TOL: + raise WeightError(f'Invalid new tx {tx.hash_hex}: weight ({tx.weight}) is ' + f'smaller than the minimum weight ({min_tx_weight})') + elif min_tx_weight > self._settings.MAX_TX_WEIGHT_DIFF_ACTIVATION and tx.weight > max_tx_weight: + raise WeightError(f'Invalid new tx {tx.hash_hex}: weight ({tx.weight}) is ' + f'greater 
than the maximum allowed ({max_tx_weight})') + + def verify_sigops_input(self, tx: Transaction) -> None: + """ Count sig operations on all inputs and verify that the total sum is below the limit + """ + from hathor.transaction.scripts import get_sigops_count + from hathor.transaction.storage.exceptions import TransactionDoesNotExist + n_txops = 0 + for tx_input in tx.inputs: + try: + spent_tx = tx.get_spent_tx(tx_input) + except TransactionDoesNotExist: + raise InexistentInput('Input tx does not exist: {}'.format(tx_input.tx_id.hex())) + assert spent_tx.hash is not None + if tx_input.index >= len(spent_tx.outputs): + raise InexistentInput('Output spent by this input does not exist: {} index {}'.format( + tx_input.tx_id.hex(), tx_input.index)) + n_txops += get_sigops_count(tx_input.data, spent_tx.outputs[tx_input.index].script) + + if n_txops > self._settings.MAX_TX_SIGOPS_INPUT: + raise TooManySigOps( + 'TX[{}]: Max number of sigops for inputs exceeded ({})'.format(tx.hash_hex, n_txops)) + + def verify_inputs(self, tx: Transaction, *, skip_script: bool = False) -> None: + """Verify inputs signatures and ownership and all inputs actually exist""" + from hathor.transaction.storage.exceptions import TransactionDoesNotExist + + spent_outputs: set[tuple[VertexId, int]] = set() + for input_tx in tx.inputs: + if len(input_tx.data) > self._settings.MAX_INPUT_DATA_SIZE: + raise InvalidInputDataSize('size: {} and max-size: {}'.format( + len(input_tx.data), self._settings.MAX_INPUT_DATA_SIZE + )) + + try: + spent_tx = tx.get_spent_tx(input_tx) + assert spent_tx.hash is not None + if input_tx.index >= len(spent_tx.outputs): + raise InexistentInput('Output spent by this input does not exist: {} index {}'.format( + input_tx.tx_id.hex(), input_tx.index)) + except TransactionDoesNotExist: + raise InexistentInput('Input tx does not exist: {}'.format(input_tx.tx_id.hex())) + + if tx.timestamp <= spent_tx.timestamp: + raise TimestampError('tx={} timestamp={}, spent_tx={} 
timestamp={}'.format( + tx.hash.hex() if tx.hash else None, + tx.timestamp, + spent_tx.hash.hex(), + spent_tx.timestamp, + )) + + if not skip_script: + self.verify_script(tx=tx, input_tx=input_tx, spent_tx=spent_tx) + + # check if any other input in this tx is spending the same output + key = (input_tx.tx_id, input_tx.index) + if key in spent_outputs: + raise ConflictingInputs('tx {} inputs spend the same output: {} index {}'.format( + tx.hash_hex, input_tx.tx_id.hex(), input_tx.index)) + spent_outputs.add(key) + + def verify_script(self, *, tx: Transaction, input_tx: TxInput, spent_tx: BaseTransaction) -> None: + """ + :type tx: Transaction + :type input_tx: TxInput + :type spent_tx: Transaction + """ + from hathor.transaction.scripts import script_eval + try: + script_eval(tx, input_tx, spent_tx) + except ScriptError as e: + raise InvalidInputData(e) from e + + def verify_reward_locked(self, tx: Transaction) -> None: + """Will raise `RewardLocked` if any reward is spent before the best block height is enough, considering only + the block rewards spent by this tx itself, and not the inherited `min_height`.""" + info = tx.get_spent_reward_locked_info() + if info is not None: + raise RewardLocked(f'Reward {info.block_hash.hex()} still needs {info.blocks_needed} to be unlocked.') + + def verify_number_of_inputs(self, tx: Transaction) -> None: + """Verify number of inputs is in a valid range""" + if len(tx.inputs) > self._settings.MAX_NUM_INPUTS: + raise TooManyInputs('Maximum number of inputs exceeded') + + if len(tx.inputs) == 0: + if not tx.is_genesis: + raise NoInputError('Transaction must have at least one input') + + def verify_output_token_indexes(self, tx: Transaction) -> None: + """Verify outputs reference an existing token uid in the tokens list + + :raises InvalidToken: output references non existent token uid """ - if tx.is_genesis: - # TODO do genesis validation - return - tx.verify_without_storage() - tx.verify_sigops_input() - tx.verify_inputs() # need 
to run verify_inputs first to check if all inputs exist - tx.verify_parents() - tx.verify_sum() - if reject_locked_reward: - tx.verify_reward_locked() + for output in tx.outputs: + # check index is valid + if output.get_token_index() > len(tx.tokens): + raise InvalidToken('token uid index not available: index {}'.format(output.get_token_index())) + + def verify_sum(self, token_dict: dict[TokenUid, TokenInfo]) -> None: + """Verify that the sum of outputs is equal of the sum of inputs, for each token. If sum of inputs + and outputs is not 0, make sure inputs have mint/melt authority. + + token_dict sums up all tokens present in the tx and their properties (amount, can_mint, can_melt) + amount = outputs - inputs, thus: + - amount < 0 when melting + - amount > 0 when minting + + :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt + """ + withdraw = 0 + deposit = 0 + for token_uid, token_info in token_dict.items(): + if token_uid == self._settings.HATHOR_TOKEN_UID: + continue + + if token_info.amount == 0: + # that's the usual behavior, nothing to do + pass + elif token_info.amount < 0: + # tokens have been melted + if not token_info.can_melt: + raise InputOutputMismatch('{} {} tokens melted, but there is no melt authority input'.format( + token_info.amount, token_uid.hex())) + withdraw += get_withdraw_amount(token_info.amount) + else: + # tokens have been minted + if not token_info.can_mint: + raise InputOutputMismatch('{} {} tokens minted, but there is no mint authority input'.format( + (-1) * token_info.amount, token_uid.hex())) + deposit += get_deposit_amount(token_info.amount) + + # check whether the deposit/withdraw amount is correct + htr_expected_amount = withdraw - deposit + htr_info = token_dict[self._settings.HATHOR_TOKEN_UID] + if htr_info.amount != htr_expected_amount: + raise InputOutputMismatch('HTR balance is different than expected. 
(amount={}, expected={})'.format( + htr_info.amount, + htr_expected_amount, + )) diff --git a/hathor/verification/verification_service.py b/hathor/verification/verification_service.py index 450de491e..efa18c6f6 100644 --- a/hathor/verification/verification_service.py +++ b/hathor/verification/verification_service.py @@ -12,38 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import NamedTuple +from typing_extensions import assert_never -from hathor.conf.settings import HathorSettings +from hathor.profiler import get_cpu_profiler from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion -from hathor.transaction.exceptions import TxValidationError from hathor.transaction.token_creation_tx import TokenCreationTransaction +from hathor.transaction.transaction import TokenInfo from hathor.transaction.validation_state import ValidationState -from hathor.verification.block_verifier import BlockVerifier -from hathor.verification.merge_mined_block_verifier import MergeMinedBlockVerifier -from hathor.verification.token_creation_transaction_verifier import TokenCreationTransactionVerifier -from hathor.verification.transaction_verifier import TransactionVerifier +from hathor.types import TokenUid +from hathor.verification.vertex_verifiers import VertexVerifiers - -class VertexVerifiers(NamedTuple): - """A group of verifier instances, one for each vertex type.""" - block: BlockVerifier - merge_mined_block: MergeMinedBlockVerifier - tx: TransactionVerifier - token_creation_tx: TokenCreationTransactionVerifier - - @classmethod - def create_defaults(cls, *, settings: HathorSettings) -> 'VertexVerifiers': - """ - Create a VertexVerifiers instance using the default verifier for each vertex type, - from all required dependencies. 
- """ - return VertexVerifiers( - block=BlockVerifier(settings=settings), - merge_mined_block=MergeMinedBlockVerifier(settings=settings), - tx=TransactionVerifier(settings=settings), - token_creation_tx=TokenCreationTransactionVerifier(settings=settings), - ) +cpu = get_cpu_profiler() class VerificationService: @@ -57,6 +36,10 @@ def validate_basic(self, vertex: BaseTransaction, *, skip_block_weight_verificat If no exception is raised, the ValidationState will end up as `BASIC` and return `True`. """ + # XXX: skip validation if previously validated + if vertex.get_metadata().validation.is_at_least_basic(): + return True + self.verify_basic(vertex, skip_block_weight_verification=skip_block_weight_verification) vertex.set_validation(ValidationState.BASIC) @@ -99,59 +82,172 @@ def verify_basic(self, vertex: BaseTransaction, *, skip_block_weight_verificatio """Basic verifications (the ones without access to dependencies: parents+inputs). Raises on error. Used by `self.validate_basic`. Should not modify the validation state.""" + # We assert with type() instead of isinstance() because each subclass has a specific branch. 
match vertex.version: case TxVersion.REGULAR_BLOCK: - assert isinstance(vertex, Block) - self.verifiers.block.verify_basic( - vertex, - skip_block_weight_verification=skip_block_weight_verification - ) + assert type(vertex) is Block + self._verify_basic_block(vertex, skip_weight_verification=skip_block_weight_verification) case TxVersion.MERGE_MINED_BLOCK: - assert isinstance(vertex, MergeMinedBlock) - self.verifiers.merge_mined_block.verify_basic( - vertex, - skip_block_weight_verification=skip_block_weight_verification - ) + assert type(vertex) is MergeMinedBlock + self._verify_basic_merge_mined_block(vertex, skip_weight_verification=skip_block_weight_verification) case TxVersion.REGULAR_TRANSACTION: - assert isinstance(vertex, Transaction) - self.verifiers.tx.verify_basic(vertex) + assert type(vertex) is Transaction + self._verify_basic_tx(vertex) case TxVersion.TOKEN_CREATION_TRANSACTION: - assert isinstance(vertex, TokenCreationTransaction) - self.verifiers.token_creation_tx.verify_basic(vertex) + assert type(vertex) is TokenCreationTransaction + self._verify_basic_token_creation_tx(vertex) case _: - raise NotImplementedError + assert_never(vertex.version) + + def _verify_basic_block(self, block: Block, *, skip_weight_verification: bool) -> None: + """Partially run validations, the ones that need parents/inputs are skipped.""" + if not skip_weight_verification: + self.verifiers.block.verify_weight(block) + self.verifiers.block.verify_reward(block) + + def _verify_basic_merge_mined_block(self, block: MergeMinedBlock, *, skip_weight_verification: bool) -> None: + self._verify_basic_block(block, skip_weight_verification=skip_weight_verification) + + def _verify_basic_tx(self, tx: Transaction) -> None: + """Partially run validations, the ones that need parents/inputs are skipped.""" + if tx.is_genesis: + # TODO do genesis validation? 
+ return + self.verifiers.tx.verify_parents_basic(tx) + self.verifiers.tx.verify_weight(tx) + self.verify_without_storage(tx) + + def _verify_basic_token_creation_tx(self, tx: TokenCreationTransaction) -> None: + self._verify_basic_tx(tx) def verify(self, vertex: BaseTransaction, *, reject_locked_reward: bool = True) -> None: """Run all verifications. Raises on error. Used by `self.validate_full`. Should not modify the validation state.""" + # We assert with type() instead of isinstance() because each subclass has a specific branch. match vertex.version: case TxVersion.REGULAR_BLOCK: - assert isinstance(vertex, Block) - self.verifiers.block.verify(vertex) + assert type(vertex) is Block + self._verify_block(vertex) case TxVersion.MERGE_MINED_BLOCK: - assert isinstance(vertex, MergeMinedBlock) - self.verifiers.merge_mined_block.verify(vertex) + assert type(vertex) is MergeMinedBlock + self._verify_merge_mined_block(vertex) case TxVersion.REGULAR_TRANSACTION: - assert isinstance(vertex, Transaction) - self.verifiers.tx.verify(vertex, reject_locked_reward=reject_locked_reward) + assert type(vertex) is Transaction + self._verify_tx(vertex, reject_locked_reward=reject_locked_reward) case TxVersion.TOKEN_CREATION_TRANSACTION: - assert isinstance(vertex, TokenCreationTransaction) - self.verifiers.token_creation_tx.verify(vertex, reject_locked_reward=reject_locked_reward) + assert type(vertex) is TokenCreationTransaction + self._verify_token_creation_tx(vertex, reject_locked_reward=reject_locked_reward) case _: - raise NotImplementedError + assert_never(vertex.version) + + @cpu.profiler(key=lambda _, block: 'block-verify!{}'.format(block.hash.hex())) + def _verify_block(self, block: Block) -> None: + """ + (1) confirms at least two pending transactions and references last block + (2) solves the pow with the correct weight (done in HathorManager) + (3) creates the correct amount of tokens in the output (done in HathorManager) + (4) all parents must exist and have timestamp 
smaller than ours + (5) data field must contain at most BLOCK_DATA_MAX_SIZE bytes + (6) whether this block must signal feature support + """ + # TODO Should we validate a limit of outputs? + if block.is_genesis: + # TODO do genesis validation + return + + self.verify_without_storage(block) + + # (1) and (4) + self.verifiers.vertex.verify_parents(block) - def validate_vertex_error(self, vertex: BaseTransaction) -> tuple[bool, str]: - """ Verify if tx is valid and return success and possible error message + self.verifiers.block.verify_height(block) - :return: Success if tx is valid and possible error message, if not - :rtype: tuple[bool, str] + self.verifiers.block.verify_mandatory_signaling(block) + + def _verify_merge_mined_block(self, block: MergeMinedBlock) -> None: + self._verify_block(block) + + @cpu.profiler(key=lambda _, tx: 'tx-verify!{}'.format(tx.hash.hex())) + def _verify_tx( + self, + tx: Transaction, + *, + reject_locked_reward: bool, + token_dict: dict[TokenUid, TokenInfo] | None = None + ) -> None: + """ Common verification for all transactions: + (i) number of inputs is at most 256 + (ii) number of outputs is at most 256 + (iii) confirms at least two pending transactions + (iv) solves the pow (we verify weight is correct in HathorManager) + (v) validates signature of inputs + (vi) validates public key and output (of the inputs) addresses + (vii) validate that both parents are valid + (viii) validate input's timestamps + (ix) validate inputs and outputs sum + """ + if tx.is_genesis: + # TODO do genesis validation + return + self.verify_without_storage(tx) + self.verifiers.tx.verify_sigops_input(tx) + self.verifiers.tx.verify_inputs(tx) # need to run verify_inputs first to check if all inputs exist + self.verifiers.vertex.verify_parents(tx) + self.verifiers.tx.verify_sum(token_dict or tx.get_complete_token_info()) + if reject_locked_reward: + self.verifiers.tx.verify_reward_locked(tx) + + def _verify_token_creation_tx(self, tx: 
TokenCreationTransaction, *, reject_locked_reward: bool) -> None: + """ Run all validations as regular transactions plus validation on token info. + + We also overload verify_sum to make some different checks + """ + token_dict = tx.get_complete_token_info() + self._verify_tx(tx, reject_locked_reward=reject_locked_reward, token_dict=token_dict) + self.verifiers.token_creation_tx.verify_minted_tokens(tx, token_dict) + self.verifiers.token_creation_tx.verify_token_info(tx) + + def verify_without_storage(self, vertex: BaseTransaction) -> None: + # We assert with type() instead of isinstance() because each subclass has a specific branch. + match vertex.version: + case TxVersion.REGULAR_BLOCK: + assert type(vertex) is Block + self._verify_without_storage_block(vertex) + case TxVersion.MERGE_MINED_BLOCK: + assert type(vertex) is MergeMinedBlock + self._verify_without_storage_merge_mined_block(vertex) + case TxVersion.REGULAR_TRANSACTION: + assert type(vertex) is Transaction + self._verify_without_storage_tx(vertex) + case TxVersion.TOKEN_CREATION_TRANSACTION: + assert type(vertex) is TokenCreationTransaction + self._verify_without_storage_token_creation_tx(vertex) + case _: + assert_never(vertex.version) + + def _verify_without_storage_block(self, block: Block) -> None: + """ Run all verifications that do not need a storage. + """ + self.verifiers.vertex.verify_pow(block) + self.verifiers.block.verify_no_inputs(block) + self.verifiers.vertex.verify_outputs(block) + self.verifiers.block.verify_output_token_indexes(block) + self.verifiers.block.verify_data(block) + self.verifiers.vertex.verify_sigops_output(block) + + def _verify_without_storage_merge_mined_block(self, block: MergeMinedBlock) -> None: + self.verifiers.merge_mined_block.verify_aux_pow(block) + self._verify_without_storage_block(block) + + def _verify_without_storage_tx(self, tx: Transaction) -> None: + """ Run all verifications that do not need a storage. 
""" - success = True - message = '' - try: - self.verify(vertex) - except TxValidationError as e: - success = False - message = str(e) - return success, message + self.verifiers.vertex.verify_pow(tx) + self.verifiers.tx.verify_number_of_inputs(tx) + self.verifiers.vertex.verify_outputs(tx) + self.verifiers.tx.verify_output_token_indexes(tx) + self.verifiers.vertex.verify_sigops_output(tx) + + def _verify_without_storage_token_creation_tx(self, tx: TokenCreationTransaction) -> None: + self._verify_without_storage_tx(tx) diff --git a/hathor/verification/vertex_verifier.py b/hathor/verification/vertex_verifier.py index 360450116..80a621502 100644 --- a/hathor/verification/vertex_verifier.py +++ b/hathor/verification/vertex_verifier.py @@ -12,11 +12,167 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import Optional + from hathor.conf.settings import HathorSettings +from hathor.daa import DifficultyAdjustmentAlgorithm +from hathor.transaction import BaseTransaction +from hathor.transaction.exceptions import ( + DuplicatedParents, + IncorrectParents, + InvalidOutputScriptSize, + InvalidOutputValue, + InvalidToken, + ParentDoesNotExist, + PowError, + TimestampError, + TooManyOutputs, + TooManySigOps, +) + +# tx should have 2 parents, both other transactions +_TX_PARENTS_TXS = 2 +_TX_PARENTS_BLOCKS = 0 + +# blocks have 3 parents, 2 txs and 1 block +_BLOCK_PARENTS_TXS = 2 +_BLOCK_PARENTS_BLOCKS = 1 class VertexVerifier: - __slots__ = ('_settings', ) + __slots__ = ('_settings', '_daa') - def __init__(self, *, settings: HathorSettings): + def __init__(self, *, settings: HathorSettings, daa: DifficultyAdjustmentAlgorithm): self._settings = settings + self._daa = daa + + def verify_parents(self, vertex: BaseTransaction) -> None: + """All parents must exist and their timestamps must be smaller than ours. + + Also, txs should have 2 other txs as parents, while blocks should have 2 txs + 1 block. 
+ + Parents must be ordered with blocks first, followed by transactions. + + :raises TimestampError: when our timestamp is less or equal than our parent's timestamp + :raises ParentDoesNotExist: when at least one of our parents does not exist + :raises IncorrectParents: when tx does not confirm the correct number/type of parent txs + """ + from hathor.transaction.storage.exceptions import TransactionDoesNotExist + + assert vertex.storage is not None + + # check if parents are duplicated + parents_set = set(vertex.parents) + if len(vertex.parents) > len(parents_set): + raise DuplicatedParents('Tx has duplicated parents: {}', [tx_hash.hex() for tx_hash in vertex.parents]) + + my_parents_txs = 0 # number of tx parents + my_parents_blocks = 0 # number of block parents + min_timestamp: Optional[int] = None + + for parent_hash in vertex.parents: + try: + parent = vertex.storage.get_transaction(parent_hash) + assert parent.hash is not None + if vertex.timestamp <= parent.timestamp: + raise TimestampError('tx={} timestamp={}, parent={} timestamp={}'.format( + vertex.hash_hex, + vertex.timestamp, + parent.hash_hex, + parent.timestamp, + )) + + if parent.is_block: + if vertex.is_block and not parent.is_genesis: + if vertex.timestamp - parent.timestamp > self._settings.MAX_DISTANCE_BETWEEN_BLOCKS: + raise TimestampError('Distance between blocks is too big' + ' ({} seconds)'.format(vertex.timestamp - parent.timestamp)) + if my_parents_txs > 0: + raise IncorrectParents('Parents which are blocks must come before transactions') + for pi_hash in parent.parents: + pi = vertex.storage.get_transaction(parent_hash) + if not pi.is_block: + min_timestamp = ( + min(min_timestamp, pi.timestamp) if min_timestamp is not None + else pi.timestamp + ) + my_parents_blocks += 1 + else: + if min_timestamp and parent.timestamp < min_timestamp: + raise TimestampError('tx={} timestamp={}, parent={} timestamp={}, min_timestamp={}'.format( + vertex.hash_hex, + vertex.timestamp, + parent.hash_hex, + 
parent.timestamp, + min_timestamp + )) + my_parents_txs += 1 + except TransactionDoesNotExist: + raise ParentDoesNotExist('tx={} parent={}'.format(vertex.hash_hex, parent_hash.hex())) + + # check for correct number of parents + if vertex.is_block: + parents_txs = _BLOCK_PARENTS_TXS + parents_blocks = _BLOCK_PARENTS_BLOCKS + else: + parents_txs = _TX_PARENTS_TXS + parents_blocks = _TX_PARENTS_BLOCKS + if my_parents_blocks != parents_blocks: + raise IncorrectParents('wrong number of parents (block type): {}, expecting {}'.format( + my_parents_blocks, parents_blocks)) + if my_parents_txs != parents_txs: + raise IncorrectParents('wrong number of parents (tx type): {}, expecting {}'.format( + my_parents_txs, parents_txs)) + + def verify_pow(self, vertex: BaseTransaction, *, override_weight: Optional[float] = None) -> None: + """Verify proof-of-work + + :raises PowError: when the hash is equal or greater than the target + """ + assert vertex.hash is not None + numeric_hash = int(vertex.hash_hex, vertex.HEX_BASE) + minimum_target = vertex.get_target(override_weight) + if numeric_hash >= minimum_target: + raise PowError(f'Transaction has invalid data ({numeric_hash} < {minimum_target})') + + def verify_outputs(self, vertex: BaseTransaction) -> None: + """Verify there are no hathor authority UTXOs and outputs are all positive + + :raises InvalidToken: when there's a hathor authority utxo + :raises InvalidOutputValue: output has negative value + :raises TooManyOutputs: when there are too many outputs + """ + self.verify_number_of_outputs(vertex) + for index, output in enumerate(vertex.outputs): + # no hathor authority UTXO + if (output.get_token_index() == 0) and output.is_token_authority(): + raise InvalidToken('Cannot have authority UTXO for hathor tokens: {}'.format( + output.to_human_readable())) + + # output value must be positive + if output.value <= 0: + raise InvalidOutputValue('Output value must be a positive integer. 
Value: {} and index: {}'.format( + output.value, index)) + + if len(output.script) > self._settings.MAX_OUTPUT_SCRIPT_SIZE: + raise InvalidOutputScriptSize('size: {} and max-size: {}'.format( + len(output.script), self._settings.MAX_OUTPUT_SCRIPT_SIZE + )) + + def verify_number_of_outputs(self, vertex: BaseTransaction) -> None: + """Verify number of outputs does not exceeds the limit""" + if len(vertex.outputs) > self._settings.MAX_NUM_OUTPUTS: + raise TooManyOutputs('Maximum number of outputs exceeded') + + def verify_sigops_output(self, vertex: BaseTransaction) -> None: + """ Count sig operations on all outputs and verify that the total sum is below the limit + """ + from hathor.transaction.scripts import get_sigops_count + n_txops = 0 + + for tx_output in vertex.outputs: + n_txops += get_sigops_count(tx_output.script) + + if n_txops > self._settings.MAX_TX_SIGOPS_OUTPUT: + raise TooManySigOps('TX[{}]: Maximum number of sigops for all outputs exceeded ({})'.format( + vertex.hash_hex, n_txops)) diff --git a/hathor/verification/vertex_verifiers.py b/hathor/verification/vertex_verifiers.py new file mode 100644 index 000000000..eed2ca74f --- /dev/null +++ b/hathor/verification/vertex_verifiers.py @@ -0,0 +1,79 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import NamedTuple + +from hathor.conf.settings import HathorSettings +from hathor.daa import DifficultyAdjustmentAlgorithm +from hathor.feature_activation.feature_service import FeatureService +from hathor.verification.block_verifier import BlockVerifier +from hathor.verification.merge_mined_block_verifier import MergeMinedBlockVerifier +from hathor.verification.token_creation_transaction_verifier import TokenCreationTransactionVerifier +from hathor.verification.transaction_verifier import TransactionVerifier +from hathor.verification.vertex_verifier import VertexVerifier + + +class VertexVerifiers(NamedTuple): + """A group of verifier instances, one for each vertex type.""" + vertex: VertexVerifier + block: BlockVerifier + merge_mined_block: MergeMinedBlockVerifier + tx: TransactionVerifier + token_creation_tx: TokenCreationTransactionVerifier + + @classmethod + def create_defaults( + cls, + *, + settings: HathorSettings, + daa: DifficultyAdjustmentAlgorithm, + feature_service: FeatureService | None = None, + ) -> 'VertexVerifiers': + """ + Create a VertexVerifiers instance using the default verifier for each vertex type, + from all required dependencies. + """ + vertex_verifier = VertexVerifier(settings=settings, daa=daa) + + return cls.create( + settings=settings, + vertex_verifier=vertex_verifier, + daa=daa, + feature_service=feature_service + ) + + @classmethod + def create( + cls, + *, + settings: HathorSettings, + vertex_verifier: VertexVerifier, + daa: DifficultyAdjustmentAlgorithm, + feature_service: FeatureService | None = None, + ) -> 'VertexVerifiers': + """ + Create a VertexVerifiers instance using a custom vertex_verifier. 
+ """ + block_verifier = BlockVerifier(settings=settings, daa=daa, feature_service=feature_service) + merge_mined_block_verifier = MergeMinedBlockVerifier() + tx_verifier = TransactionVerifier(settings=settings, daa=daa) + token_creation_tx_verifier = TokenCreationTransactionVerifier(settings=settings) + + return VertexVerifiers( + vertex=vertex_verifier, + block=block_verifier, + merge_mined_block=merge_mined_block_verifier, + tx=tx_verifier, + token_creation_tx=token_creation_tx_verifier, + ) diff --git a/hathor/version.py b/hathor/version.py index 3003f06e8..1f6b94328 100644 --- a/hathor/version.py +++ b/hathor/version.py @@ -19,7 +19,7 @@ from structlog import get_logger -BASE_VERSION = '0.57.0' +BASE_VERSION = '0.58.0' DEFAULT_VERSION_SUFFIX = "local" BUILD_VERSION_FILE_PATH = "./BUILD_VERSION" diff --git a/hathor/wallet/base_wallet.py b/hathor/wallet/base_wallet.py index 913c13bc7..ec5e2dc5e 100644 --- a/hathor/wallet/base_wallet.py +++ b/hathor/wallet/base_wallet.py @@ -27,13 +27,13 @@ from hathor.conf import HathorSettings from hathor.crypto.util import decode_address from hathor.pubsub import EventArguments, HathorEvents, PubSubManager +from hathor.reactor import ReactorProtocol as Reactor, get_global_reactor from hathor.transaction import BaseTransaction, Block, TxInput, TxOutput from hathor.transaction.base_transaction import int_to_bytes from hathor.transaction.scripts import P2PKH, create_output_script, parse_address_script from hathor.transaction.storage import TransactionStorage from hathor.transaction.transaction import Transaction from hathor.types import AddressB58, Amount, TokenUid -from hathor.util import Reactor from hathor.wallet.exceptions import InputDuplicated, InsufficientFunds, PrivateKeyNotFound settings = HathorSettings() @@ -129,8 +129,7 @@ def __init__(self, directory: str = './', pubsub: Optional[PubSubManager] = None ] if reactor is None: - from hathor.util import reactor as twisted_reactor - reactor = twisted_reactor + reactor = 
get_global_reactor() self.reactor = reactor def _manually_initialize(self) -> None: diff --git a/hathor/wallet/resources/nano_contracts/execute.py b/hathor/wallet/resources/nano_contracts/execute.py index d08bd4c5a..ebf2c143c 100644 --- a/hathor/wallet/resources/nano_contracts/execute.py +++ b/hathor/wallet/resources/nano_contracts/execute.py @@ -20,7 +20,6 @@ from hathor.api_util import Resource, get_missing_params_msg, render_options, set_cors from hathor.cli.openapi_files.register import register_resource from hathor.crypto.util import decode_address -from hathor.daa import minimum_tx_weight from hathor.transaction import Transaction, TxInput, TxOutput from hathor.transaction.scripts import P2PKH, NanoContractMatchValues from hathor.util import json_dumpb, json_loadb @@ -99,8 +98,8 @@ def render_POST(self, request): tx.parents = self.manager.get_new_tx_parents() tx.update_timestamp(int(self.manager.reactor.seconds())) - tx.weight = minimum_tx_weight(tx) - tx.resolve() + tx.weight = self.manager.daa.minimum_tx_weight(tx) + self.manager.cpu_mining_service.resolve(tx) success = self.manager.propagate_tx(tx) ret = {'success': success, 'hex_tx': tx.get_struct().hex()} diff --git a/hathor/wallet/resources/send_tokens.py b/hathor/wallet/resources/send_tokens.py index cf14f11fd..16bd97355 100644 --- a/hathor/wallet/resources/send_tokens.py +++ b/hathor/wallet/resources/send_tokens.py @@ -20,7 +20,6 @@ from hathor.api_util import Resource, render_options, set_cors from hathor.cli.openapi_files.register import register_resource from hathor.crypto.util import decode_address -from hathor.daa import minimum_tx_weight from hathor.exception import InvalidNewTransaction from hathor.transaction import Transaction from hathor.transaction.exceptions import TxValidationError @@ -125,9 +124,9 @@ def _render_POST_thread(self, values: dict[str, Any], request: Request) -> Union tx.parents = values['parents'] weight = values['weight'] if weight is None: - weight = minimum_tx_weight(tx) 
+ weight = self.manager.daa.minimum_tx_weight(tx) tx.weight = weight - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) self.manager.verification_service.verify(tx) return tx diff --git a/hathor/wallet/resources/sign_tx.py b/hathor/wallet/resources/sign_tx.py index cef27a689..8b86c6af4 100644 --- a/hathor/wallet/resources/sign_tx.py +++ b/hathor/wallet/resources/sign_tx.py @@ -17,7 +17,6 @@ from hathor.api_util import Resource, get_args, get_missing_params_msg, set_cors from hathor.cli.openapi_files.register import register_resource -from hathor.daa import minimum_tx_weight from hathor.transaction import Transaction from hathor.util import json_dumpb @@ -67,8 +66,8 @@ def render_GET(self, request): if prepare_to_send: tx.parents = self.manager.get_new_tx_parents() tx.update_timestamp(int(self.manager.reactor.seconds())) - tx.weight = minimum_tx_weight(tx) - tx.resolve() + tx.weight = self.manager.daa.minimum_tx_weight(tx) + self.manager.cpu_mining_service.resolve(tx) data = {'hex_tx': tx.get_struct().hex(), 'success': True} except struct.error: diff --git a/hathor/wallet/resources/thin_wallet/send_tokens.py b/hathor/wallet/resources/thin_wallet/send_tokens.py index 136791f47..c9bbf10ce 100644 --- a/hathor/wallet/resources/thin_wallet/send_tokens.py +++ b/hathor/wallet/resources/thin_wallet/send_tokens.py @@ -27,10 +27,11 @@ from hathor.cli.openapi_files.register import register_resource from hathor.conf.get_settings import get_settings from hathor.exception import InvalidNewTransaction +from hathor.reactor import get_global_reactor from hathor.transaction import Transaction from hathor.transaction.base_transaction import tx_or_block_from_bytes from hathor.transaction.exceptions import TxValidationError -from hathor.util import json_dumpb, json_loadb, reactor +from hathor.util import json_dumpb, json_loadb logger = get_logger() @@ -59,6 +60,7 @@ def __init__(self, manager): self.manager = manager self.sleep_seconds = 0 self.log = logger.new() + 
self.reactor = get_global_reactor() def render_POST(self, request: Request) -> Any: """ POST request for /thin_wallet/send_tokens/ @@ -177,7 +179,7 @@ def _render_POST(self, context: _Context) -> None: # Set parents tx.parents = self.manager.get_new_tx_parents(tx.timestamp) - deferred = threads.deferToThreadPool(reactor, self.manager.pow_thread_pool, + deferred = threads.deferToThreadPool(self.reactor, self.manager.pow_thread_pool, self._render_POST_thread, context) deferred.addCallback(self._cb_tx_resolve) deferred.addErrback(self._err_tx_resolve, context, 'python_resolve') @@ -204,7 +206,7 @@ def _stratum_deferred_resolve(self, context: _Context) -> None: # Delete it to avoid memory leak del self.manager.stratum_factory.mined_txs[funds_hash] - deferred = threads.deferToThreadPool(reactor, self.manager.pow_thread_pool, + deferred = threads.deferToThreadPool(self.reactor, self.manager.pow_thread_pool, self._stratum_thread_verify, context) deferred.addCallback(self._cb_tx_resolve) deferred.addErrback(self._err_tx_resolve, context, 'stratum_resolve') @@ -260,7 +262,11 @@ def _render_POST_thread(self, context: _Context) -> _Context: # TODO Tx should be resolved in the frontend def _should_stop(): return context.should_stop_mining_thread - context.tx.start_mining(sleep_seconds=self.sleep_seconds, should_stop=_should_stop) + self.manager.cpu_mining_service.start_mining( + context.tx, + sleep_seconds=self.sleep_seconds, + should_stop=_should_stop + ) if context.should_stop_mining_thread: raise CancelledError() context.tx.update_hash() diff --git a/hathor/websocket/factory.py b/hathor/websocket/factory.py index 1a797189d..2c7aa2d16 100644 --- a/hathor/websocket/factory.py +++ b/hathor/websocket/factory.py @@ -25,7 +25,8 @@ from hathor.metrics import Metrics from hathor.p2p.rate_limiter import RateLimiter from hathor.pubsub import EventArguments, HathorEvents -from hathor.util import json_dumpb, json_loadb, json_loads, reactor +from hathor.reactor import get_global_reactor 
+from hathor.util import json_dumpb, json_loadb, json_loads from hathor.websocket.protocol import HathorAdminWebsocketProtocol settings = HathorSettings() @@ -89,6 +90,7 @@ def __init__(self, metrics: Optional[Metrics] = None, address_index: Optional[Ad :param metrics: If not given, a new one is created. :type metrics: :py:class:`hathor.metrics.Metrics` """ + self.reactor = get_global_reactor() # Opened websocket connections so I can broadcast messages later # It contains only connections that have finished handshaking. self.connections: set[HathorAdminWebsocketProtocol] = set() @@ -98,7 +100,7 @@ def __init__(self, metrics: Optional[Metrics] = None, address_index: Optional[Ad super().__init__() # Limit the send message rate for specific type of data - self.rate_limiter = RateLimiter(reactor=reactor) + self.rate_limiter = RateLimiter(reactor=self.reactor) # Stores the buffer of messages that exceeded the rate limit and will be sent self.buffer_deques: dict[str, deque[dict[str, Any]]] = {} @@ -111,7 +113,7 @@ def __init__(self, metrics: Optional[Metrics] = None, address_index: Optional[Ad # A timer to periodically broadcast dashboard metrics self._lc_send_metrics = LoopingCall(self._send_metrics) - self._lc_send_metrics.clock = reactor + self._lc_send_metrics.clock = self.reactor def start(self): self.is_running = True @@ -144,7 +146,7 @@ def _send_metrics(self): 'hash_rate': self.metrics.hash_rate, 'peers': self.metrics.connected_peers, 'type': 'dashboard:metrics', - 'time': reactor.seconds(), + 'time': self.reactor.seconds(), }) def subscribe(self, pubsub): @@ -277,8 +279,8 @@ def enqueue_for_later(self, data): self.buffer_deques[data['type']].append(data) if len(self.buffer_deques[data['type']]) == 1: # If it's the first time we hit the limit (only one message in deque), we schedule process_deque - reactor.callLater(CONTROLLED_TYPES[data['type']]['time_buffering'], self.process_deque, - data_type=data['type']) + 
self.reactor.callLater(CONTROLLED_TYPES[data['type']]['time_buffering'], self.process_deque, + data_type=data['type']) def process_deque(self, data_type): """ Process the deque and check if I have limit to send the messages now @@ -294,8 +296,8 @@ def process_deque(self, data_type): data['throttled'] = False self.send_message(data) else: - reactor.callLater(CONTROLLED_TYPES[data_type]['time_buffering'], self.process_deque, - data_type=data_type) + self.reactor.callLater(CONTROLLED_TYPES[data_type]['time_buffering'], self.process_deque, + data_type=data_type) break def handle_message(self, connection: HathorAdminWebsocketProtocol, data: Union[bytes, str]) -> None: diff --git a/poetry.lock b/poetry.lock index 1ab969ea7..53b74c73a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -553,19 +553,19 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] [[package]] name = "flake8" -version = "6.0.0" +version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, - {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.10.0,<2.11.0" -pyflakes = ">=3.0.0,<3.1.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" [[package]] name = "flaky" @@ -800,21 +800,21 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.20)", "pa [[package]] name = "isort" -version = "5.10.1" +version = "5.12.0" description = "A Python utility / library to sort Python imports." 
optional = false -python-versions = ">=3.6.1,<4.0" +python-versions = ">=3.8.0" files = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, ] [package.dependencies] -colorama = {version = ">=0.4.3,<0.5.0", optional = true, markers = "extra == \"colors\""} +colorama = {version = ">=0.4.3", optional = true, markers = "extra == \"colors\""} [package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile-deprecated-finder = ["pipreqs", "requirementslib"] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] plugins = ["setuptools"] requirements-deprecated-finder = ["pip-api", "pipreqs"] @@ -958,37 +958,38 @@ files = [ [[package]] name = "mypy" -version = "1.4.1" +version = "1.5.1" description = "Optional static typing for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, - {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, - {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, - {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, - {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, - {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, - {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, - {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, - {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, - {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, - {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, - {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, - {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, - {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, - {file = 
"mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, - {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, - {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, - {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, - {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, - {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, + {file = "mypy-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70"}, + {file = "mypy-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0"}, + {file = "mypy-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12"}, + {file = "mypy-1.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d"}, + {file = "mypy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"}, + {file = 
"mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"}, + {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"}, + {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"}, + {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"}, + {file = "mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"}, + {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"}, + {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160"}, + {file = "mypy-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2"}, + {file = "mypy-1.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb"}, + {file = "mypy-1.5.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14"}, + {file = "mypy-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb"}, + {file = "mypy-1.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693"}, + {file = "mypy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770"}, + {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"}, + {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"}, ] [package.dependencies] @@ -999,7 +1000,6 @@ typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] [[package]] @@ -1015,17 +1015,17 @@ files = [ [[package]] name = "mypy-zope" -version = "1.0.0" +version = "1.0.1" description = "Plugin for mypy to support zope interfaces" optional = false python-versions = "*" files = [ - {file = "mypy-zope-1.0.0.tar.gz", hash = "sha256:be815c2fcb5333aa87e8ec682029ad3214142fe2a05ea383f9ff2d77c98008b7"}, - {file = "mypy_zope-1.0.0-py3-none-any.whl", hash = "sha256:9732e9b2198f2aec3343b38a51905ff49d44dc9e39e8e8bc6fc490b232388209"}, + {file = "mypy-zope-1.0.1.tar.gz", hash = "sha256:003953896629d762d7f497135171ad549df42a8ac63c1521a230832dd6f7fc25"}, + {file = "mypy_zope-1.0.1-py3-none-any.whl", hash = "sha256:ffa291a7af9f5904ce9f0e56de44323a4476e28aaf0d68361b62b1b0e997d0b8"}, ] [package.dependencies] -mypy = ">=1.0.0,<1.5.0" 
+mypy = ">=1.0.0,<1.6.0" "zope.interface" = "*" "zope.schema" = "*" @@ -1189,13 +1189,13 @@ pyasn1 = ">=0.4.6,<0.5.0" [[package]] name = "pycodestyle" -version = "2.10.0" +version = "2.11.1" description = "Python style guide checker" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, - {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, ] [[package]] @@ -1221,47 +1221,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.11" +version = "1.10.13" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff44c5e89315b15ff1f7fdaf9853770b810936d6b01a7bcecaa227d2f8fe444f"}, - {file = "pydantic-1.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c098d4ab5e2d5b3984d3cb2527e2d6099d3de85630c8934efcfdc348a9760e"}, - {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16928fdc9cb273c6af00d9d5045434c39afba5f42325fb990add2c241402d151"}, - {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0588788a9a85f3e5e9ebca14211a496409cb3deca5b6971ff37c556d581854e7"}, - {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9baf78b31da2dc3d3f346ef18e58ec5f12f5aaa17ac517e2ffd026a92a87588"}, - {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:373c0840f5c2b5b1ccadd9286782852b901055998136287828731868027a724f"}, - {file = "pydantic-1.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:c3339a46bbe6013ef7bdd2844679bfe500347ac5742cd4019a88312aa58a9847"}, - {file = "pydantic-1.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:08a6c32e1c3809fbc49debb96bf833164f3438b3696abf0fbeceb417d123e6eb"}, - {file = "pydantic-1.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a451ccab49971af043ec4e0d207cbc8cbe53dbf148ef9f19599024076fe9c25b"}, - {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b02d24f7b2b365fed586ed73582c20f353a4c50e4be9ba2c57ab96f8091ddae"}, - {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f34739a89260dfa420aa3cbd069fbcc794b25bbe5c0a214f8fb29e363484b66"}, - {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e297897eb4bebde985f72a46a7552a7556a3dd11e7f76acda0c1093e3dbcf216"}, - {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d185819a7a059550ecb85d5134e7d40f2565f3dd94cfd870132c5f91a89cf58c"}, - {file = "pydantic-1.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:4400015f15c9b464c9db2d5d951b6a780102cfa5870f2c036d37c23b56f7fc1b"}, - {file = "pydantic-1.10.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2417de68290434461a266271fc57274a138510dca19982336639484c73a07af6"}, - {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:331c031ba1554b974c98679bd0780d89670d6fd6f53f5d70b10bdc9addee1713"}, - {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8268a735a14c308923e8958363e3a3404f6834bb98c11f5ab43251a4e410170c"}, - {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:44e51ba599c3ef227e168424e220cd3e544288c57829520dc90ea9cb190c3248"}, - {file = 
"pydantic-1.10.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d7781f1d13b19700b7949c5a639c764a077cbbdd4322ed505b449d3ca8edcb36"}, - {file = "pydantic-1.10.11-cp37-cp37m-win_amd64.whl", hash = "sha256:7522a7666157aa22b812ce14c827574ddccc94f361237ca6ea8bb0d5c38f1629"}, - {file = "pydantic-1.10.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc64eab9b19cd794a380179ac0e6752335e9555d214cfcb755820333c0784cb3"}, - {file = "pydantic-1.10.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8dc77064471780262b6a68fe67e013298d130414d5aaf9b562c33987dbd2cf4f"}, - {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe429898f2c9dd209bd0632a606bddc06f8bce081bbd03d1c775a45886e2c1cb"}, - {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:192c608ad002a748e4a0bed2ddbcd98f9b56df50a7c24d9a931a8c5dd053bd3d"}, - {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ef55392ec4bb5721f4ded1096241e4b7151ba6d50a50a80a2526c854f42e6a2f"}, - {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e0bb6efe86281623abbeeb0be64eab740c865388ee934cd3e6a358784aca6e"}, - {file = "pydantic-1.10.11-cp38-cp38-win_amd64.whl", hash = "sha256:265a60da42f9f27e0b1014eab8acd3e53bd0bad5c5b4884e98a55f8f596b2c19"}, - {file = "pydantic-1.10.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:469adf96c8e2c2bbfa655fc7735a2a82f4c543d9fee97bd113a7fb509bf5e622"}, - {file = "pydantic-1.10.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6cbfbd010b14c8a905a7b10f9fe090068d1744d46f9e0c021db28daeb8b6de1"}, - {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abade85268cc92dff86d6effcd917893130f0ff516f3d637f50dadc22ae93999"}, - {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e9738b0f2e6c70f44ee0de53f2089d6002b10c33264abee07bdb5c7f03038303"}, - {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:787cf23e5a0cde753f2eabac1b2e73ae3844eb873fd1f5bdbff3048d8dbb7604"}, - {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:174899023337b9fc685ac8adaa7b047050616136ccd30e9070627c1aaab53a13"}, - {file = "pydantic-1.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:1954f8778489a04b245a1e7b8b22a9d3ea8ef49337285693cf6959e4b757535e"}, - {file = "pydantic-1.10.11-py3-none-any.whl", hash = "sha256:008c5e266c8aada206d0627a011504e14268a62091450210eda7c07fabe6963e"}, - {file = "pydantic-1.10.11.tar.gz", hash = "sha256:f66d479cf7eb331372c470614be6511eae96f1f120344c25f3f9bb59fb1b5528"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, + {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, + {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, + {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, + {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, + {file = 
"pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, + {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, + {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, + {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, + {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, ] [package.dependencies] @@ -1273,13 +1273,13 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pyflakes" -version = "3.0.1" +version = "3.1.0" description = "passive checker of Python programs" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, - {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, ] [[package]] @@ -1316,17 +1316,16 @@ test = ["flaky", "pretend", "pytest (>=3.0.1)"] [[package]] name = "pytest" -version = "7.2.0" +version = "7.4.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, 
] [package.dependencies] -attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" @@ -1335,17 +1334,17 @@ pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" -version = "4.0.0" +version = "4.1.0" description = "Pytest plugin for measuring coverage." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, - {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] @@ -1357,13 +1356,13 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale [[package]] name = "pytest-xdist" -version = "3.2.0" +version = "3.3.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-xdist-3.2.0.tar.gz", hash = "sha256:fa10f95a2564cd91652f2d132725183c3b590d9fdcdec09d3677386ecf4c1ce9"}, - {file = "pytest_xdist-3.2.0-py3-none-any.whl", hash = "sha256:336098e3bbd8193276867cc87db8b22903c3927665dff9d1ac8684c02f597b68"}, + {file = "pytest-xdist-3.3.1.tar.gz", hash = 
"sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"}, + {file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"}, ] [package.dependencies] @@ -1375,6 +1374,17 @@ psutil = ["psutil (>=3.0)"] setproctitle = ["setproctitle"] testing = ["filelock"] +[[package]] +name = "python-healthchecklib" +version = "0.1.0" +description = "Opinionated healthcheck library" +optional = false +python-versions = ">=3.8.1,<4.0.0" +files = [ + {file = "python_healthchecklib-0.1.0-py3-none-any.whl", hash = "sha256:95d94fcae7f281adf16624014ae789dfa38d1be327cc38b02ee82bad70671f2f"}, + {file = "python_healthchecklib-0.1.0.tar.gz", hash = "sha256:afa0572d37902c50232d99acf0065836082bb027109c9c98e8d5acfefd381595"}, +] + [[package]] name = "pywin32" version = "305" @@ -1898,13 +1908,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.4.0" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] [[package]] @@ -1936,13 +1946,13 @@ files = [ [[package]] name = "yamllint" -version = "1.31.0" +version = "1.32.0" description = "A linter for YAML files." 
optional = false python-versions = ">=3.7" files = [ - {file = "yamllint-1.31.0-py3-none-any.whl", hash = "sha256:15f4bdb645e6a4a0a22fe5415bc38b4a934c51419b30104896d2f3f95e329185"}, - {file = "yamllint-1.31.0.tar.gz", hash = "sha256:2d83f1d12f733e162a87e06b176149d7bb9c5bae4a9e5fce1c771d7f703f7a65"}, + {file = "yamllint-1.32.0-py3-none-any.whl", hash = "sha256:d97a66e48da820829d96077d76b8dfbe6c6140f106e558dae87e81ac4e6b30b7"}, + {file = "yamllint-1.32.0.tar.gz", hash = "sha256:d01dde008c65de5b235188ab3110bebc59d18e5c65fc8a58267cd211cd9df34a"}, ] [package.dependencies] @@ -2136,4 +2146,4 @@ sentry = ["sentry-sdk", "structlog-sentry"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<4" -content-hash = "62d54c9d748647746f20a2bfb84163143d744915c15138256561b29186386807" +content-hash = "2b20a90cf75e75bd32568e722489db53b4a4b490f4e3f084ff5734ea8137c37e" diff --git a/pyproject.toml b/pyproject.toml index b9b1004e9..f40746b0d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ [tool.poetry] name = "hathor" -version = "0.57.0" +version = "0.58.0" description = "Hathor Network full-node" authors = ["Hathor Team "] license = "Apache-2.0" @@ -35,15 +35,15 @@ exclude = ["tests", "tests.*"] hathor-cli = 'hathor.cli.main:main' [tool.poetry.dev-dependencies] -flake8 = "~6.0.0" -isort = {version = "~5.10.1", extras = ["colors"]} -mypy = {version = "^1.4.1", markers = "implementation_name == 'cpython'"} -mypy-zope = {version = "^1.0.0", markers = "implementation_name == 'cpython'"} -pytest = "~7.2.0" -pytest-cov = "~4.0.0" +flake8 = "~6.1.0" +isort = {version = "~5.12.0", extras = ["colors"]} +mypy = {version = "^1.5.1", markers = "implementation_name == 'cpython'"} +mypy-zope = {version = "^1.0.1", markers = "implementation_name == 'cpython'"} +pytest = "~7.4.3" +pytest-cov = "~4.1.0" flaky = "~3.7.0" -pytest-xdist = "~3.2.0" -yamllint = "~1.31.0" +pytest-xdist = "~3.3.1" +yamllint = "~1.32.0" # stubs: types-requests = "=2.28.11.4" types-pyopenssl = 
"=22.1.0.2" @@ -76,8 +76,10 @@ setproctitle = "^1.2.2" sentry-sdk = {version = "^1.5.11", optional = true} structlog-sentry = {version = "^1.4.0", optional = true} hathorlib = "0.3.0" -pydantic = "~1.10.11" +pydantic = "~1.10.13" pyyaml = "^6.0.1" +typing-extensions = "~4.8.0" +python-healthchecklib = "^0.1.0" [tool.poetry.extras] sentry = ["sentry-sdk", "structlog-sentry"] diff --git a/tests/cli/test_multisig_signature.py b/tests/cli/test_multisig_signature.py index f4fce98a5..9fb802b0a 100644 --- a/tests/cli/test_multisig_signature.py +++ b/tests/cli/test_multisig_signature.py @@ -9,9 +9,10 @@ from structlog.testing import capture_logs from hathor.cli.multisig_signature import create_parser, execute +from hathor.simulator.utils import add_new_blocks from hathor.wallet import Wallet from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_blocks, add_new_transactions +from tests.utils import add_blocks_unlock_reward, add_new_transactions class BaseSignatureTest(unittest.TestCase): diff --git a/tests/cli/test_multisig_spend.py b/tests/cli/test_multisig_spend.py index d76187e6d..0608003f7 100644 --- a/tests/cli/test_multisig_spend.py +++ b/tests/cli/test_multisig_spend.py @@ -6,12 +6,13 @@ from hathor.cli.multisig_spend import create_parser, execute from hathor.conf import HathorSettings from hathor.crypto.util import decode_address +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TxInput, TxOutput from hathor.transaction.scripts import create_output_script from hathor.wallet.base_wallet import WalletBalance, WalletOutputInfo from hathor.wallet.util import generate_multisig_address, generate_multisig_redeem_script, generate_signature from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_blocks +from tests.utils import add_blocks_unlock_reward settings = HathorSettings() @@ -74,7 +75,7 @@ def test_spend_multisig(self): tx1.weight = 10 tx1.parents = 
self.manager.get_new_tx_parents() tx1.timestamp = int(self.clock.seconds()) - tx1.resolve() + self.manager.cpu_mining_service.resolve(tx1) self.manager.propagate_tx(tx1) self.clock.advance(10) diff --git a/tests/cli/test_twin_tx.py b/tests/cli/test_twin_tx.py index a65f38965..4f4aef7df 100644 --- a/tests/cli/test_twin_tx.py +++ b/tests/cli/test_twin_tx.py @@ -6,12 +6,12 @@ from hathor.cli.twin_tx import create_parser, execute from hathor.conf import HathorSettings +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TransactionMetadata from hathor.util import json_loadb from tests import unittest from tests.utils import ( add_blocks_unlock_reward, - add_new_blocks, add_new_transactions, execute_mining, execute_tx_gen, diff --git a/tests/conftest.py b/tests/conftest.py index 475c5b59f..33fb90950 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,18 +1,14 @@ -import asyncio import os import sys -from twisted.internet import asyncioreactor - from hathor.conf import UNITTESTS_SETTINGS_FILEPATH +from hathor.reactor import initialize_global_reactor os.environ['HATHOR_CONFIG_YAML'] = os.environ.get('HATHOR_TEST_CONFIG_YAML', UNITTESTS_SETTINGS_FILEPATH) if sys.platform == 'win32': - # See: https://twistedmatrix.com/documents/current/api/twisted.internet.asyncioreactor.AsyncioSelectorReactor.html - asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) - # XXX: because rocksdb isn't available on Windows, we force using memory-storage for tests so most of them can run os.environ['HATHOR_TEST_MEMORY_STORAGE'] = 'true' -asyncioreactor.install(asyncio.get_event_loop()) +# TODO: We should remove this call from the module level. 
+initialize_global_reactor(use_asyncio_reactor=True) diff --git a/tests/consensus/test_consensus.py b/tests/consensus/test_consensus.py index 048a3a440..27daa916a 100644 --- a/tests/consensus/test_consensus.py +++ b/tests/consensus/test_consensus.py @@ -1,16 +1,10 @@ from unittest.mock import MagicMock from hathor.conf import HathorSettings +from hathor.simulator.utils import add_new_block, add_new_blocks, gen_new_tx from hathor.transaction.storage import TransactionMemoryStorage from tests import unittest -from tests.utils import ( - add_blocks_unlock_reward, - add_new_block, - add_new_blocks, - add_new_double_spending, - add_new_transactions, - gen_new_tx, -) +from tests.utils import add_blocks_unlock_reward, add_new_double_spending, add_new_transactions settings = HathorSettings() @@ -82,7 +76,7 @@ def test_revert_block_high_weight(self): tb0 = manager.make_custom_block_template(blocks[-1].hash, [conflicting_tx.hash, conflicting_tx.parents[0]]) b0 = tb0.generate_mining_block(manager.rng, storage=manager.tx_storage) b0.weight = 10 - b0.resolve() + manager.cpu_mining_service.resolve(b0) manager.verification_service.verify(b0) manager.propagate_tx(b0, fails_silently=False) @@ -144,7 +138,7 @@ def test_dont_revert_block_low_weight(self): # So, it is not enough to revert and this block will be voided as well. 
b0 = manager.generate_mining_block() b0.parents = [blocks[-1].hash, conflicting_tx.hash, conflicting_tx.parents[0]] - b0.resolve() + manager.cpu_mining_service.resolve(b0) manager.verification_service.verify(b0) manager.propagate_tx(b0, fails_silently=False) @@ -200,7 +194,7 @@ def test_dont_revert_block_high_weight_transaction_verify_other(self): tb0 = manager.make_custom_block_template(blocks[-1].hash, [conflicting_tx.hash, conflicting_tx.parents[0]]) b0 = tb0.generate_mining_block(manager.rng, storage=manager.tx_storage) b0.weight = 10 - b0.resolve() + manager.cpu_mining_service.resolve(b0) manager.verification_service.verify(b0) manager.propagate_tx(b0, fails_silently=False) @@ -254,7 +248,7 @@ def test_dont_revert_block_high_weight_verify_both(self): b0 = manager.generate_mining_block() b0.parents = [b0.parents[0], conflicting_tx.hash, conflicting_tx.parents[0]] b0.weight = 10 - b0.resolve() + manager.cpu_mining_service.resolve(b0) manager.verification_service.verify(b0) manager.propagate_tx(b0, fails_silently=False) diff --git a/tests/consensus/test_consensus2.py b/tests/consensus/test_consensus2.py index f8d96b8a4..d8993c69e 100644 --- a/tests/consensus/test_consensus2.py +++ b/tests/consensus/test_consensus2.py @@ -1,7 +1,8 @@ from hathor.graphviz import GraphvizVisualizer +from hathor.simulator.utils import gen_new_tx from tests import unittest from tests.simulation.base import SimulatorTestCase -from tests.utils import add_custom_tx, gen_new_tx +from tests.utils import add_custom_tx class BaseConsensusSimulatorTestCase(SimulatorTestCase): diff --git a/tests/consensus/test_consensus3.py b/tests/consensus/test_consensus3.py index 0aaaac1be..70099975c 100644 --- a/tests/consensus/test_consensus3.py +++ b/tests/consensus/test_consensus3.py @@ -1,7 +1,8 @@ import pytest +from hathor.simulator.utils import add_new_block, add_new_blocks from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_block, add_new_blocks +from tests.utils 
import add_blocks_unlock_reward class DoubleSpendingTestCase(unittest.TestCase): @@ -34,7 +35,7 @@ def test_double_spending_attempt_1(self): tx_fund0.weight = 1 tx_fund0.parents = manager.get_new_tx_parents() tx_fund0.timestamp = int(self.clock.seconds()) - tx_fund0.resolve() + manager.cpu_mining_service.resolve(tx_fund0) self.assertTrue(manager.propagate_tx(tx_fund0)) def do_step(tx_fund): @@ -43,7 +44,7 @@ def do_step(tx_fund): tx1 = manager.wallet.prepare_transaction(Transaction, inputs, outputs, tx_fund.timestamp+1) tx1.weight = 1 tx1.parents = manager.get_new_tx_parents(tx1.timestamp) - tx1.resolve() + manager.cpu_mining_service.resolve(tx1) self.assertTrue(manager.propagate_tx(tx1)) inputs = [] @@ -53,7 +54,7 @@ def do_step(tx_fund): tx2 = manager.wallet.prepare_transaction(Transaction, inputs, outputs, tx1.timestamp+1) tx2.weight = 1 tx2.parents = manager.get_new_tx_parents(tx2.timestamp) - tx2.resolve() + manager.cpu_mining_service.resolve(tx2) self.assertTrue(manager.propagate_tx(tx2)) inputs = [WalletInputInfo(tx_fund.hash, 0, manager.wallet.get_private_key(addr))] @@ -61,7 +62,7 @@ def do_step(tx_fund): tx3 = manager.wallet.prepare_transaction(Transaction, inputs, outputs, tx_fund.timestamp+1) tx3.weight = tx1.weight + tx2.weight + 0.1 tx3.parents = manager.get_new_tx_parents(tx3.timestamp) - tx3.resolve() + manager.cpu_mining_service.resolve(tx3) self.assertTrue(manager.propagate_tx(tx3)) inputs = [WalletInputInfo(tx_fund.hash, 1, manager.wallet.get_private_key(addr))] @@ -69,7 +70,7 @@ def do_step(tx_fund): tx4 = manager.wallet.prepare_transaction(Transaction, inputs, outputs, tx_fund.timestamp+1) tx4.weight = 1 tx4.parents = manager.get_new_tx_parents(tx4.timestamp) - tx4.resolve() + manager.cpu_mining_service.resolve(tx4) self.assertTrue(manager.propagate_tx(tx4)) inputs = [] @@ -81,7 +82,7 @@ def do_step(tx_fund): tx5 = manager.wallet.prepare_transaction(Transaction, inputs, outputs, tx2.timestamp+1) tx5.weight = tx3.weight - tx1.weight + 0.1 
tx5.parents = [tx2.hash, tx4.hash] - tx5.resolve() + manager.cpu_mining_service.resolve(tx5) self.assertTrue(manager.propagate_tx(tx5)) return tx5 @@ -124,7 +125,7 @@ def test_double_spending_attempt_2(self): tx_fund0.weight = 1 tx_fund0.parents = manager.get_new_tx_parents() tx_fund0.timestamp = int(self.clock.seconds()) - tx_fund0.resolve() + manager.cpu_mining_service.resolve(tx_fund0) self.assertTrue(manager.propagate_tx(tx_fund0)) def do_step(tx_fund): @@ -133,7 +134,7 @@ def do_step(tx_fund): tx1 = manager.wallet.prepare_transaction(Transaction, inputs, outputs, tx_fund.timestamp+1) tx1.weight = 1 tx1.parents = manager.get_new_tx_parents(tx1.timestamp) - tx1.resolve() + manager.cpu_mining_service.resolve(tx1) self.assertTrue(manager.propagate_tx(tx1)) inputs = [] @@ -143,7 +144,7 @@ def do_step(tx_fund): tx2 = manager.wallet.prepare_transaction(Transaction, inputs, outputs, tx1.timestamp+1) tx2.weight = 1.1 tx2.parents = manager.get_new_tx_parents(tx2.timestamp) - tx2.resolve() + manager.cpu_mining_service.resolve(tx2) self.assertTrue(manager.propagate_tx(tx2)) inputs = [] @@ -153,7 +154,7 @@ def do_step(tx_fund): tx3 = manager.wallet.prepare_transaction(Transaction, inputs, outputs, tx_fund.timestamp+1) tx3.weight = 1 tx3.parents = manager.get_new_tx_parents(tx3.timestamp) - tx3.resolve() + manager.cpu_mining_service.resolve(tx3) self.assertTrue(manager.propagate_tx(tx3)) inputs = [] @@ -163,7 +164,7 @@ def do_step(tx_fund): tx4 = manager.wallet.prepare_transaction(Transaction, inputs, outputs, tx_fund.timestamp+1) tx4.weight = tx1.weight + tx2.weight + 0.1 tx4.parents = manager.get_new_tx_parents(tx4.timestamp) - tx4.resolve() + manager.cpu_mining_service.resolve(tx4) self.assertTrue(manager.propagate_tx(tx4)) inputs = [] @@ -176,7 +177,7 @@ def do_step(tx_fund): tx5 = manager.wallet.prepare_transaction(Transaction, inputs, outputs, tx4.timestamp+1) tx5.weight = 1 tx5.parents = manager.get_new_tx_parents(tx5.timestamp) - tx5.resolve() + 
manager.cpu_mining_service.resolve(tx5) self.assertTrue(manager.propagate_tx(tx5)) return tx5 diff --git a/tests/consensus/test_soft_voided.py b/tests/consensus/test_soft_voided.py index 5449287c1..d039917ef 100644 --- a/tests/consensus/test_soft_voided.py +++ b/tests/consensus/test_soft_voided.py @@ -2,15 +2,16 @@ from hathor.graphviz import GraphvizVisualizer from hathor.simulator import FakeConnection, Simulator from hathor.simulator.trigger import StopAfterNTransactions +from hathor.simulator.utils import gen_new_tx from tests import unittest from tests.simulation.base import SimulatorTestCase -from tests.utils import add_custom_tx, gen_new_tx +from tests.utils import add_custom_tx settings = HathorSettings() class BaseSoftVoidedTestCase(SimulatorTestCase): - seed_config = 5988775361793628169 + seed_config = 5988775361793628170 def assertNoParentsAreSoftVoided(self, tx): for h in tx.parents: diff --git a/tests/consensus/test_soft_voided2.py b/tests/consensus/test_soft_voided2.py index 67f24d1d4..584012f71 100644 --- a/tests/consensus/test_soft_voided2.py +++ b/tests/consensus/test_soft_voided2.py @@ -1,9 +1,10 @@ from hathor.conf import HathorSettings from hathor.graphviz import GraphvizVisualizer from hathor.simulator import Simulator +from hathor.simulator.utils import gen_new_tx from tests import unittest from tests.simulation.base import SimulatorTestCase -from tests.utils import BURN_ADDRESS, add_custom_tx, gen_new_tx +from tests.utils import BURN_ADDRESS, add_custom_tx settings = HathorSettings() diff --git a/tests/consensus/test_soft_voided3.py b/tests/consensus/test_soft_voided3.py index 721e42d47..77e8d4d9a 100644 --- a/tests/consensus/test_soft_voided3.py +++ b/tests/consensus/test_soft_voided3.py @@ -2,9 +2,10 @@ from hathor.graphviz import GraphvizVisualizer from hathor.simulator import FakeConnection, Simulator from hathor.simulator.trigger import StopAfterNTransactions +from hathor.simulator.utils import gen_new_tx from tests import unittest from 
tests.simulation.base import SimulatorTestCase -from tests.utils import add_custom_tx, gen_custom_tx, gen_new_tx +from tests.utils import add_custom_tx, gen_custom_tx settings = HathorSettings() diff --git a/tests/consensus/test_soft_voided4.py b/tests/consensus/test_soft_voided4.py index 57a9cd4c2..3776c1aba 100644 --- a/tests/consensus/test_soft_voided4.py +++ b/tests/consensus/test_soft_voided4.py @@ -2,9 +2,10 @@ from hathor.graphviz import GraphvizVisualizer from hathor.simulator import FakeConnection, Simulator from hathor.simulator.trigger import StopAfterNTransactions +from hathor.simulator.utils import gen_new_double_spending from tests import unittest from tests.simulation.base import SimulatorTestCase -from tests.utils import add_custom_tx, gen_new_double_spending +from tests.utils import add_custom_tx settings = HathorSettings() diff --git a/tests/event/test_base_event.py b/tests/event/test_base_event.py index d99144781..5751ae988 100644 --- a/tests/event/test_base_event.py +++ b/tests/event/test_base_event.py @@ -25,7 +25,6 @@ @pytest.mark.parametrize('group_id', [None, 0, 1, 1000]) def test_create_base_event(event_id, group_id): event = BaseEvent( - peer_id='some_peer', id=event_id, timestamp=123.3, type=EventType.VERTEX_METADATA_CHANGED, @@ -34,7 +33,6 @@ def test_create_base_event(event_id, group_id): ) expected = dict( - peer_id='some_peer', id=event_id, timestamp=123.3, type='VERTEX_METADATA_CHANGED', @@ -76,7 +74,6 @@ def test_create_base_event(event_id, group_id): def test_create_base_event_fail_id(event_id): with pytest.raises(ValidationError): BaseEvent( - peer_id='some_peer', id=event_id, timestamp=123.3, type=EventType.VERTEX_METADATA_CHANGED, @@ -88,7 +85,6 @@ def test_create_base_event_fail_id(event_id): def test_create_base_event_fail_group_id(group_id): with pytest.raises(ValidationError): BaseEvent( - peer_id='some_peer', id=0, timestamp=123.3, type=EventType.VERTEX_METADATA_CHANGED, @@ -100,7 +96,6 @@ def 
test_create_base_event_fail_group_id(group_id): def test_create_base_event_fail_data_type(): with pytest.raises(ValidationError): BaseEvent( - peer_id='some_peer', id=0, timestamp=123.3, type=EventType.VERTEX_METADATA_CHANGED, diff --git a/tests/event/test_event_reorg.py b/tests/event/test_event_reorg.py index 7d145482b..c941c9278 100644 --- a/tests/event/test_event_reorg.py +++ b/tests/event/test_event_reorg.py @@ -1,8 +1,9 @@ from hathor.conf import HathorSettings from hathor.event.model.event_type import EventType from hathor.event.storage import EventMemoryStorage +from hathor.simulator.utils import add_new_blocks from tests import unittest -from tests.utils import BURN_ADDRESS, add_new_blocks, get_genesis_key +from tests.utils import BURN_ADDRESS, get_genesis_key settings = HathorSettings() @@ -37,7 +38,7 @@ def test_reorg_events(self): tb0 = self.manager.make_custom_block_template(block_to_replace.parents[0], block_to_replace.parents[1:]) b0 = tb0.generate_mining_block(self.manager.rng, storage=self.manager.tx_storage, address=BURN_ADDRESS) b0.weight = 10 - b0.resolve() + self.manager.cpu_mining_service.resolve(b0) self.manager.verification_service.verify(b0) self.manager.propagate_tx(b0, fails_silently=False) self.log.debug('reorg block propagated') @@ -76,9 +77,9 @@ def test_reorg_events(self): (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[9].hash_hex}), (EventType.REORG_STARTED, {'reorg_size': 2, 'previous_best_block': blocks[9].hash_hex, 'new_best_block': b0.hash_hex}), - (EventType.VERTEX_METADATA_CHANGED, {'hash': b0.hash_hex}), (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[9].hash_hex}), (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[8].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': b0.hash_hex}), (EventType.REORG_FINISHED, {}), (EventType.NEW_VERTEX_ACCEPTED, {'hash': b0.hash_hex}), ] diff --git a/tests/event/test_event_simulation_scenarios.py b/tests/event/test_event_simulation_scenarios.py index 5b9ba4fc4..9acaab38d 100644 
--- a/tests/event/test_event_simulation_scenarios.py +++ b/tests/event/test_event_simulation_scenarios.py @@ -51,15 +51,17 @@ def test_only_load(self) -> None: expected = [ # LOAD_STATED - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=4, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=4, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', 
peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=4, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=4, stream_id=stream_id) # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=4, stream_id=stream_id) # noqa: E501 ] + responses = _remove_timestamp(responses) + expected = _remove_timestamp(expected) assert responses == 
expected, f'expected: {expected}\n\nactual: {responses}' def test_single_chain_one_block(self): @@ -71,21 +73,23 @@ def test_single_chain_one_block(self): expected = [ # LOAD_STATED - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', 
peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx 
(2), adding the new block as their child # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 - 
EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', 
peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, 
weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id) # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id) # noqa: E501 ] + responses = _remove_timestamp(responses) + expected = 
_remove_timestamp(expected) assert responses == expected, f'expected: {expected}\n\nactual: {responses}' def test_single_chain_blocks_and_transactions(self): @@ -97,58 +101,60 @@ def test_single_chain_blocks_and_transactions(self): expected = [ # LOAD_STATED - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], 
token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # 
noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one 
VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', 
'896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 
'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', 
'97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, 
type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=9, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=10, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=11, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], 
parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=12, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=13, timestamp=1578878910.25, 
type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=14, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=15, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=16, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=17, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=18, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', 
nonce=0, timestamp=1578878915, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=19, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, 
first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=20, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=21, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=22, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=23, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, version=0, weight=2.0, 
inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=24, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), 
group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=25, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=26, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=27, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=28, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, 
timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], 
voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], 
parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, 
timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], 
received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], 
parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, 
timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], 
voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], 
parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, 
timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], 
voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], 
parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=29, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=30, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, version=1, weight=18.656776158409354, 
inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=31, timestamp=1578878970.5, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), 
aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=1578878970.5, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a tx, adding the new tx as spending their output and children # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=32, timestamp=1578879030.75, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.4904519466213, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=33, timestamp=1578879030.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], 
outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[SpentOutput(index=0, tx_ids=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'])], conflict_with=[], voided_by=[], received_by=[], children=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=1578879030.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.4904519466213, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=1578879030.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[SpentOutput(index=0, 
tx_ids=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'])], conflict_with=[], voided_by=[], received_by=[], children=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=34, timestamp=1578879030.75, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.4904519466213, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - # One VERTEX_METADATA_CHANGED for a new 
block (below), and one VERTEX_METADATA_CHANGED for each confirmed transaction # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=35, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585413276128, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=36, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), 
TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9'], twins=[], accumulated_weight=18.4904519466213, score=0.0, first_block='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=37, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], 
token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[SpentOutput(index=0, tx_ids=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'])], conflict_with=[], voided_by=[], received_by=[], children=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9'], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=1578879030.75, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], 
voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.4904519466213, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each confirmed transaction (first block changed) # noqa E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585413276128, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, 
script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9'], twins=[], accumulated_weight=18.4904519466213, score=0.0, first_block='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', 
timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[SpentOutput(index=0, tx_ids=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'])], conflict_with=[], voided_by=[], received_by=[], children=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9'], twins=[], accumulated_weight=18.656776158409354, score=0.0, first_block='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=38, timestamp=1578879091.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=[], twins=[], accumulated_weight=8.0, score=19.576585413276128, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id) # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=1578879091.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585413276128, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id) # noqa: E501 ] + responses = _remove_timestamp(responses) + expected = _remove_timestamp(expected) assert responses == expected, f'expected: {expected}\n\nactual: {responses}' def test_reorg(self): @@ -160,41 +166,110 @@ def test_reorg(self): expected = [ # LOAD_STATED - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), 
latest_event_id=20, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, 
inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), 
aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=5, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], 
parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=6, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], twins=[], accumulated_weight=2.0, score=2.0, first_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=7, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], twins=[], accumulated_weight=2.0, score=2.0, first_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], twins=[], accumulated_weight=2.0, score=2.0, first_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], twins=[], accumulated_weight=2.0, score=2.0, first_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block from manager1 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=8, timestamp=1578878940.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878940.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 # Also one VERTEX_METADATA_CHANGED for the previous block, voiding it - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=9, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, version=0, weight=2.0, inputs=[], 
outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=10, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, 
stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=11, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=12, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, 
version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), 
aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block from manager2 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=13, timestamp=1578879064.0, 
type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578879064.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], 
voided_by=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 # REORG_STARTED caused by a new block from manager2 (below) - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=14, timestamp=1578879064.25, type=EventType.REORG_STARTED, data=ReorgData(reorg_size=1, previous_best_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', new_best_block='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', common_block='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792'), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=1578879064.25, type=EventType.REORG_STARTED, data=ReorgData(reorg_size=1, previous_best_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', new_best_block='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', common_block='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792'), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 # Also one VERTEX_METADATA_CHANGED for the previous block, un-voiding it as it's now part of the best blockchain # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=15, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=16, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=17, timestamp=1578879064.25, 
type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=2.0, first_block='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=18, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=2.0, first_block='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578879064.25, 
type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=2.0, first_block='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', 
'38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=2.0, first_block='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 # REORG_FINISHED - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=19, timestamp=1578879064.25, type=EventType.REORG_FINISHED, data=EmptyData(), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=1578879064.25, type=EventType.REORG_FINISHED, data=EmptyData(), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block from manager2 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=20, timestamp=1578879064.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id) # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, 
network='unittests', event=BaseEvent(id=20, timestamp=1578879064.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id) # noqa: E501 ] + responses = _remove_timestamp(responses) + expected = _remove_timestamp(expected) + assert responses == expected, f'expected: {expected}\n\nactual: {responses}' + + def test_unvoided_transaction(self): + stream_id = self.manager._event_manager._stream_id + Scenario.UNVOIDED_TRANSACTION.simulate(self.simulator, self.manager) + self._start_stream() + + responses = self._get_success_responses() + + expected = [ + # LOAD_STARTED + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, type=EventType.LOAD_STARTED, timestamp=0, data=EmptyData(), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, 
data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], 
received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + # LOAD_FINISHED + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, type=EventType.LOAD_FINISHED, timestamp=0, data=EmptyData(), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, 
data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', 
'8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + # One NEW_VERTEX_ACCEPTED for a new block + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, 
first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, 
data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], 
parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, 
data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], 
parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, 
type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], 
parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, 
type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, 
score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.0005, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, 
data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + # One NEW_VERTEX_ACCEPTED for a new tx + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, 
script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.0005, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED for a new tx (below), one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output, and one VERTEX_METADATA_CHANGED adding the new tx as twin/conflict of the previous tx # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], received_by=[], children=[], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.0, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], 
conflict_with=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], voided_by=[], received_by=[], children=[], twins=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], accumulated_weight=19.0005, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + # One NEW_VERTEX_ACCEPTED for a new tx that is a twin of the previous one. It's voided. 
+ EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], received_by=[], children=[], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.0, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each twin tx, inverting the voided state of them. # noqa E501 + # The order of events is important, we receive the voided txs first, then reverse topological ordering. 
+ EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], voided_by=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], received_by=[], children=[], twins=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], accumulated_weight=19.0005, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='66d748139afcb9105b61b34f5c93baa9e856c58e5873ff7c194bbc1adb3e9286', nonce=0, timestamp=1578879090, version=0, weight=8.0, 
inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='66d748139afcb9105b61b34f5c93baa9e856c58e5873ff7c194bbc1adb3e9286', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=[], received_by=[], children=['66d748139afcb9105b61b34f5c93baa9e856c58e5873ff7c194bbc1adb3e9286'], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.000704269011248, score=0.0, first_block='66d748139afcb9105b61b34f5c93baa9e856c58e5873ff7c194bbc1adb3e9286', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + # One NEW_VERTEX_ACCEPTED for a new block + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='66d748139afcb9105b61b34f5c93baa9e856c58e5873ff7c194bbc1adb3e9286', nonce=0, timestamp=1578879090, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='66d748139afcb9105b61b34f5c93baa9e856c58e5873ff7c194bbc1adb3e9286', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + ] + + responses = _remove_timestamp(responses) + expected = _remove_timestamp(expected) assert responses == expected, f'expected: {expected}\n\nactual: {responses}' def _start_stream(self) -> None: @@ -203,6 
+278,15 @@ def _start_stream(self) -> None: self.simulator.run(60) +def _remove_timestamp(responses: list[EventResponse]) -> list[EventResponse]: + for response in responses: + # We remove the timestamp from the comparison as it's not important and can be affected by other parts of + # the code. + del response.event.timestamp + + return responses + + class MemoryEventSimulationScenariosTest(BaseEventSimulationScenariosTest, MemoryEventSimulationTester): __test__ = True diff --git a/tests/event/websocket/test_factory.py b/tests/event/websocket/test_factory.py index 8bdb935fb..b100fc6d5 100644 --- a/tests/event/websocket/test_factory.py +++ b/tests/event/websocket/test_factory.py @@ -74,7 +74,13 @@ def test_broadcast_event(can_receive_event: bool) -> None: if not can_receive_event: return connection.send_event_response.assert_not_called() - response = EventResponse(event=event, latest_event_id=n_starting_events - 1, stream_id=stream_id) + response = EventResponse( + peer_id='my_peer_id', + network='my_network', + event=event, + latest_event_id=n_starting_events - 1, + stream_id=stream_id + ) connection.send_event_response.assert_called_once_with(response) @@ -133,7 +139,13 @@ def test_send_next_event_to_connection(next_expected_event_id: int, can_receive_ calls = [] for _id in range(next_expected_event_id, n_starting_events): event = EventMocker.create_event(_id) - response = EventResponse(event=event, latest_event_id=n_starting_events - 1, stream_id=stream_id) + response = EventResponse( + peer_id='my_peer_id', + network='my_network', + event=event, + latest_event_id=n_starting_events - 1, + stream_id=stream_id + ) calls.append(call(response)) assert connection.send_event_response.call_count == n_starting_events - next_expected_event_id @@ -150,4 +162,9 @@ def _get_factory( event = EventMocker.create_event(event_id) event_storage.save_event(event) - return EventWebsocketFactory(clock, event_storage) + return EventWebsocketFactory( + peer_id='my_peer_id', + 
network='my_network', + reactor=clock, + event_storage=event_storage + ) diff --git a/tests/event/websocket/test_protocol.py b/tests/event/websocket/test_protocol.py index b7fa83544..2f3a4dcfe 100644 --- a/tests/event/websocket/test_protocol.py +++ b/tests/event/websocket/test_protocol.py @@ -83,8 +83,9 @@ def test_send_event_response(): protocol = EventWebsocketProtocol() protocol.sendMessage = Mock() response = EventResponse( + peer_id='my_peer_id', + network='my_network', event=BaseEvent( - peer_id='some_peer_id', id=10, timestamp=123, type=EventType.VERTEX_METADATA_CHANGED, @@ -96,14 +97,14 @@ def test_send_event_response(): protocol.send_event_response(response) - expected_payload = b'{"type":"EVENT","event":{"peer_id":"some_peer_id","id":10,"timestamp":123.0,' \ - b'"type":"VERTEX_METADATA_CHANGED","data":{"hash":"abc","nonce":123,"timestamp":456,' \ - b'"version":1,"weight":10.0,"inputs":[],"outputs":[],"parents":[],"tokens":[],' \ - b'"token_name":null,"token_symbol":null,"metadata":{"hash":"abc","spent_outputs":[],' \ - b'"conflict_with":[],"voided_by":[],"received_by":[],"children":[],"twins":[],' \ - b'"accumulated_weight":10.0,"score":20.0,"first_block":null,"height":100,' \ - b'"validation":"validation"},"aux_pow":null},"group_id":null},"latest_event_id":10,' \ - b'"stream_id":"stream_id"}' + expected_payload = (b'{"type":"EVENT","peer_id":"my_peer_id","network":"my_network","event":{"id":10,' + b'"timestamp":123.0,"type":"VERTEX_METADATA_CHANGED","data":{"hash":"abc","nonce":123,' + b'"timestamp":456,"version":1,"weight":10.0,"inputs":[],"outputs":[],"parents":[],' + b'"tokens":[],"token_name":null,"token_symbol":null,"metadata":{"hash":"abc",' + b'"spent_outputs":[],"conflict_with":[],"voided_by":[],"received_by":[],"children":[],' + b'"twins":[],"accumulated_weight":10.0,"score":20.0,"first_block":null,"height":100,' + b'"validation":"validation"},"aux_pow":null},"group_id":null},"latest_event_id":10,' + b'"stream_id":"stream_id"}') 
protocol.sendMessage.assert_called_once_with(expected_payload) diff --git a/tests/feature_activation/test_bit_signaling_service.py b/tests/feature_activation/test_bit_signaling_service.py index b46951d8b..f3b24e140 100644 --- a/tests/feature_activation/test_bit_signaling_service.py +++ b/tests/feature_activation/test_bit_signaling_service.py @@ -15,7 +15,6 @@ from unittest.mock import Mock import pytest -from structlog.testing import capture_logs from hathor.feature_activation.bit_signaling_service import BitSignalingService from hathor.feature_activation.feature import Feature @@ -272,17 +271,15 @@ def get_bits_description_mock(block): support_features=support_features, not_support_features=not_support_features, ) + logger_mock = Mock() + service._log = logger_mock - with capture_logs() as logs: - service.start() + service.start() - expected_log = dict( - log_level='warning', + logger_mock.warn.assert_called_with( + 'Considering the current best block, there are signaled features outside their signaling period. ' + 'Therefore, signaling for them has no effect. Make sure you are signaling for the desired features.', best_block_height=123, best_block_hash='abc', non_signaling_features=non_signaling_features, - event='Considering the current best block, there are signaled features outside their signaling period. ' - 'Therefore, signaling for them has no effect. 
Make sure you are signaling for the desired features.', ) - - assert expected_log in logs diff --git a/tests/feature_activation/test_feature_service.py b/tests/feature_activation/test_feature_service.py index 4cc781095..4a01069d3 100644 --- a/tests/feature_activation/test_feature_service.py +++ b/tests/feature_activation/test_feature_service.py @@ -19,7 +19,12 @@ from hathor.conf import HathorSettings from hathor.feature_activation.feature import Feature -from hathor.feature_activation.feature_service import FeatureService +from hathor.feature_activation.feature_service import ( + BlockIsMissingSignal, + BlockIsSignaling, + BlockSignalingState, + FeatureService, +) from hathor.feature_activation.model.criteria import Criteria from hathor.feature_activation.model.feature_description import FeatureDescription from hathor.feature_activation.model.feature_state import FeatureState @@ -43,8 +48,8 @@ def _get_blocks_and_storage() -> tuple[list[Block], TransactionStorage]: 0b0011, 0b0001, - 0b0000, # 8: boundary block - 0b0000, + 0b0010, # 8: boundary block + 0b0110, 0b0000, 0b0000, @@ -475,10 +480,11 @@ def test_caching_mechanism(block_mocks: list[Block], tx_storage: TransactionStor assert result1 == FeatureState.ACTIVE assert calculate_new_state_mock.call_count == 4 + calculate_new_state_mock.reset_mock() result2 = service.get_state(block=block, feature=Feature.NOP_FEATURE_1) assert result2 == FeatureState.ACTIVE - assert calculate_new_state_mock.call_count == 4 + assert calculate_new_state_mock.call_count == 0 @pytest.mark.parametrize('block_height', [16, 17, 18, 19]) @@ -651,3 +657,58 @@ def test_get_ancestor_at_height_voided( assert result == block_mocks[ancestor_height] assert result.get_height() == ancestor_height assert cast(Mock, tx_storage.get_transaction_by_height).call_count == 0 + + +@pytest.mark.parametrize( + ['bit', 'threshold', 'block_height', 'signaling_state'], + [ + (0, 4, 0, BlockIsSignaling()), + (0, 4, 3, BlockIsSignaling()), + (0, 4, 7, 
BlockIsSignaling()), + (0, 4, 8, BlockIsSignaling()), + (0, 4, 11, BlockIsSignaling()), + (0, 4, 12, BlockIsSignaling()), + + (1, 4, 0, BlockIsSignaling()), + (1, 4, 3, BlockIsSignaling()), + (1, 4, 7, BlockIsSignaling()), + (1, 4, 8, BlockIsSignaling()), + (1, 4, 9, BlockIsSignaling()), + (1, 4, 10, BlockIsMissingSignal(feature=Feature.NOP_FEATURE_1)), + (1, 4, 11, BlockIsMissingSignal(feature=Feature.NOP_FEATURE_1)), + (1, 4, 12, BlockIsSignaling()), + + (2, 2, 8, BlockIsSignaling()), + (2, 2, 9, BlockIsSignaling()), + (2, 2, 10, BlockIsSignaling()), + (2, 2, 11, BlockIsMissingSignal(feature=Feature.NOP_FEATURE_1)), + (2, 2, 12, BlockIsSignaling()), + ] +) +def test_check_must_signal( + tx_storage: TransactionStorage, + block_mocks: list[Block], + bit: int, + threshold: int, + block_height: int, + signaling_state: BlockSignalingState +) -> None: + feature_settings = FeatureSettings( + evaluation_interval=4, + default_threshold=threshold, + features={ + Feature.NOP_FEATURE_1: Criteria( + bit=bit, + start_height=0, + timeout_height=12, + lock_in_on_timeout=True, + version='0.0.0' + ) + } + ) + service = FeatureService(feature_settings=feature_settings, tx_storage=tx_storage) + block = block_mocks[block_height] + + result = service.is_signaling_mandatory_features(block) + + assert result == signaling_state diff --git a/tests/feature_activation/test_feature_simulation.py b/tests/feature_activation/test_feature_simulation.py index 5b5f0b475..2e7e1f307 100644 --- a/tests/feature_activation/test_feature_simulation.py +++ b/tests/feature_activation/test_feature_simulation.py @@ -18,6 +18,7 @@ import pytest from hathor.builder import Builder +from hathor.conf.get_settings import get_settings from hathor.feature_activation import feature_service as feature_service_module from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService @@ -25,15 +26,17 @@ from hathor.feature_activation.resources.feature import 
FeatureResource from hathor.feature_activation.settings import Settings as FeatureSettings from hathor.simulator import FakeConnection -from hathor.simulator.trigger import StopAfterNMinedBlocks +from hathor.transaction.exceptions import BlockMustSignalError from tests import unittest from tests.resources.base_resource import StubSite from tests.simulation.base import SimulatorTestCase -from tests.utils import HAS_ROCKSDB +from tests.utils import HAS_ROCKSDB, add_new_blocks class BaseFeatureSimulationTest(SimulatorTestCase): - builder: Builder + def get_simulator_builder(self) -> Builder: + """Return a pre-configured builder to be used in tests.""" + raise NotImplementedError @staticmethod def _get_result(web_client: StubSite) -> dict[str, Any]: @@ -46,20 +49,17 @@ def _get_result(web_client: StubSite) -> dict[str, Any]: return result @staticmethod - def _get_state_mock_block_height_calls(get_state_mock: Mock) -> list[int]: - """Returns the heights of blocks that get_state_mock was called with.""" - return [call.kwargs['block'].get_height() for call in get_state_mock.call_args_list] + def _calculate_new_state_mock_block_height_calls(calculate_new_state_mock: Mock) -> list[int]: + """Return the heights of blocks that calculate_new_state_mock was called with.""" + return [call.kwargs['boundary_block'].get_height() for call in calculate_new_state_mock.call_args_list] def test_feature(self) -> None: """ Tests that a feature goes through all possible states in the correct block heights, and also assert internal - method call counts and args to make sure we're executing it in the most performatic way. + method calls to make sure we're executing it in the intended, most performatic way. 
""" - artifacts = self.simulator.create_artifacts(self.builder) - manager = artifacts.manager - manager.allow_mining_without_peers() - feature_settings = FeatureSettings( + enable_usage=True, evaluation_interval=4, max_signal_bits=4, default_threshold=3, @@ -75,10 +75,12 @@ def test_feature(self) -> None: } ) - feature_service = FeatureService( - feature_settings=feature_settings, - tx_storage=artifacts.tx_storage - ) + settings = get_settings()._replace(FEATURE_ACTIVATION=feature_settings) + builder = self.get_simulator_builder().set_settings(settings) + artifacts = self.simulator.create_artifacts(builder) + feature_service = artifacts.feature_service + manager = artifacts.manager + feature_resource = FeatureResource( feature_settings=feature_settings, feature_service=feature_service, @@ -86,19 +88,16 @@ def test_feature(self) -> None: ) web_client = StubSite(feature_resource) - miner = self.simulator.create_miner(manager, hashpower=1e6) - miner.start() - - get_state_mock = Mock(wraps=feature_service.get_state) + calculate_new_state_mock = Mock(wraps=feature_service._calculate_new_state) get_ancestor_iteratively_mock = Mock(wraps=feature_service_module._get_ancestor_iteratively) with ( - patch.object(FeatureService, 'get_state', get_state_mock), + patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock), patch.object(feature_service_module, '_get_ancestor_iteratively', get_ancestor_iteratively_mock) ): # at the beginning, the feature is DEFINED: - trigger = StopAfterNMinedBlocks(miner, quantity=10) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 10) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=10, @@ -116,15 +115,15 @@ def test_feature(self) -> None: ) ] ) - # so we query states all the way down to genesis: - assert self._get_state_mock_block_height_calls(get_state_mock) == [10, 8, 4, 0] + # so we calculate states all the way down to the first evaluation boundary 
(after genesis): + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 4 # no blocks are voided, so we only use the height index, and not get_ancestor_iteratively: assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + calculate_new_state_mock.reset_mock() # at block 19, the feature is DEFINED, just before becoming STARTED: - trigger = StopAfterNMinedBlocks(miner, quantity=9) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 9) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=19, @@ -142,14 +141,14 @@ def test_feature(self) -> None: ) ] ) - # so we query states from block 19 to 8, as it's cached: - assert self._get_state_mock_block_height_calls(get_state_mock) == [19, 16, 12, 8] + # so we calculate states down to block 12, as block 8's state is saved: + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 12 assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + calculate_new_state_mock.reset_mock() # at block 20, the feature becomes STARTED: - trigger = StopAfterNMinedBlocks(miner, quantity=1) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 1) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=20, @@ -167,13 +166,16 @@ def test_feature(self) -> None: ) ] ) - assert self._get_state_mock_block_height_calls(get_state_mock) == [20, 16] + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 20 assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + + # we add one block before resetting the mock, just to make sure block 20 gets a chance to be saved + add_new_blocks(manager, 1) + calculate_new_state_mock.reset_mock() # at block 55, the feature is STARTED, just before becoming MUST_SIGNAL: - trigger = StopAfterNMinedBlocks(miner, 
quantity=35) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 34) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=55, @@ -191,15 +193,13 @@ def test_feature(self) -> None: ) ] ) - assert ( - self._get_state_mock_block_height_calls(get_state_mock) == [55, 52, 48, 44, 40, 36, 32, 28, 24, 20] - ) + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 24 assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + calculate_new_state_mock.reset_mock() # at block 56, the feature becomes MUST_SIGNAL: - trigger = StopAfterNMinedBlocks(miner, quantity=1) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 1) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=56, @@ -217,13 +217,26 @@ def test_feature(self) -> None: ) ] ) - assert self._get_state_mock_block_height_calls(get_state_mock) == [56, 52] + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 56 assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + + # we add one block before resetting the mock, just to make sure block 56 gets a chance to be saved + add_new_blocks(manager, 1, signal_bits=0b1) + calculate_new_state_mock.reset_mock() + + # if we try to propagate a non-signaling block, it is not accepted + non_signaling_block = manager.generate_mining_block() + manager.cpu_mining_service.resolve(non_signaling_block) + non_signaling_block.signal_bits = 0b10 + + with pytest.raises(BlockMustSignalError): + manager.verification_service.verify(non_signaling_block) + + assert not manager.propagate_tx(non_signaling_block) # at block 59, the feature is MUST_SIGNAL, just before becoming LOCKED_IN: - trigger = StopAfterNMinedBlocks(miner, quantity=3) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, num_blocks=2, signal_bits=0b1) + 
self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=59, @@ -231,7 +244,7 @@ def test_feature(self) -> None: dict( name='NOP_FEATURE_1', state='MUST_SIGNAL', - acceptance=0, + acceptance=0.75, threshold=0.75, start_height=20, timeout_height=60, @@ -241,15 +254,14 @@ def test_feature(self) -> None: ) ] ) - assert ( - self._get_state_mock_block_height_calls(get_state_mock) == [59, 56] - ) + # we don't need to calculate any new state, as block 56's state is saved: + assert len(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 0 assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + calculate_new_state_mock.reset_mock() # at block 60, the feature becomes LOCKED_IN: - trigger = StopAfterNMinedBlocks(miner, quantity=1) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 1) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=60, @@ -267,13 +279,16 @@ def test_feature(self) -> None: ) ] ) - assert self._get_state_mock_block_height_calls(get_state_mock) == [60, 56] + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 60 assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + + # we add one block before resetting the mock, just to make sure block 60 gets a chance to be saved + add_new_blocks(manager, 1) + calculate_new_state_mock.reset_mock() # at block 71, the feature is LOCKED_IN, just before becoming ACTIVE: - trigger = StopAfterNMinedBlocks(miner, quantity=11) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 10) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=71, @@ -291,15 +306,13 @@ def test_feature(self) -> None: ) ] ) - assert ( - self._get_state_mock_block_height_calls(get_state_mock) == [71, 68, 64, 60] - ) + assert 
min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 64 assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + calculate_new_state_mock.reset_mock() # at block 72, the feature becomes ACTIVE, forever: - trigger = StopAfterNMinedBlocks(miner, quantity=1) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 1) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=72, @@ -317,16 +330,13 @@ def test_feature(self) -> None: ) ] ) - assert self._get_state_mock_block_height_calls(get_state_mock) == [72, 68] + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 72 assert get_ancestor_iteratively_mock.call_count == 0 - get_state_mock.reset_mock() + calculate_new_state_mock.reset_mock() def test_reorg(self) -> None: - artifacts = self.simulator.create_artifacts(self.builder) - manager = artifacts.manager - manager.allow_mining_without_peers() - feature_settings = FeatureSettings( + enable_usage=True, evaluation_interval=4, max_signal_bits=4, default_threshold=3, @@ -340,10 +350,13 @@ def test_reorg(self) -> None: ) } ) - feature_service = FeatureService( - feature_settings=feature_settings, - tx_storage=artifacts.tx_storage - ) + + settings = get_settings()._replace(FEATURE_ACTIVATION=feature_settings) + builder = self.get_simulator_builder().set_settings(settings) + artifacts = self.simulator.create_artifacts(builder) + feature_service = artifacts.feature_service + manager = artifacts.manager + feature_resource = FeatureResource( feature_settings=feature_settings, feature_service=feature_service, @@ -351,19 +364,8 @@ def test_reorg(self) -> None: ) web_client = StubSite(feature_resource) - # 4 blocks per evaluation interval, and the genesis is skipped - signal_bits = [ - 0b0000, 0b0000, 0b0000, # 0% acceptance - 0b0000, 0b0000, 0b0010, 0b0000, # 25% acceptance - 0b0010, 0b0000, 0b0010, 0b0010, # 75% acceptance - ] 
- - miner = self.simulator.create_miner(manager, hashpower=1e6, signal_bits=signal_bits) - miner.start() - # at the beginning, the feature is DEFINED: - trigger = StopAfterNMinedBlocks(miner, quantity=0) - self.simulator.run(36000, trigger=trigger) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=0, @@ -383,8 +385,8 @@ def test_reorg(self) -> None: ) # at block 4, the feature becomes STARTED with 0% acceptance - trigger = StopAfterNMinedBlocks(miner, quantity=4) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 4) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=4, @@ -403,9 +405,10 @@ def test_reorg(self) -> None: ] ) - # at block 7, acceptance was 25% - trigger = StopAfterNMinedBlocks(miner, quantity=3) - self.simulator.run(36000, trigger=trigger) + # at block 7, acceptance is 25% (we're signaling 1 block out of 4) + add_new_blocks(manager, 2) + add_new_blocks(manager, 1, signal_bits=0b10) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=7, @@ -424,9 +427,11 @@ def test_reorg(self) -> None: ] ) - # at block 11, acceptance was 75%, so the feature will be locked-in in the next block - trigger = StopAfterNMinedBlocks(miner, quantity=4) - self.simulator.run(36000, trigger=trigger) + # at block 11, acceptance is 75% (we're signaling 3 blocks out of 4), + # so the feature will be locked-in in the next block + add_new_blocks(manager, 1) + add_new_blocks(manager, 3, signal_bits=0b10) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=11, @@ -446,8 +451,8 @@ def test_reorg(self) -> None: ) # at block 12, the feature is locked-in - trigger = StopAfterNMinedBlocks(miner, quantity=1) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 1) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=12, @@ 
-467,8 +472,8 @@ def test_reorg(self) -> None: ) # at block 16, the feature is activated - trigger = StopAfterNMinedBlocks(miner, quantity=4) - self.simulator.run(36000, trigger=trigger) + add_new_blocks(manager, 4) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=16, @@ -487,19 +492,14 @@ def test_reorg(self) -> None: ] ) - miner.stop() - - # We then create a new manager with a miner that mines one more block (17 vs 16), so its blockchain wins when + # We then create a new manager with one more block (17 vs 16), so its blockchain wins when # both managers are connected. This causes a reorg and the feature goes back to the STARTED state. - manager2 = self.simulator.create_peer() - manager2.allow_mining_without_peers() - - miner2 = self.simulator.create_miner(manager2, hashpower=1e6) + builder2 = self.get_simulator_builder().set_settings(settings) + artifacts2 = self.simulator.create_artifacts(builder2) + manager2 = artifacts2.manager - miner2.start() - trigger = StopAfterNMinedBlocks(miner2, quantity=17) - self.simulator.run(36000, trigger=trigger) - miner2.stop() + add_new_blocks(manager2, 17) + self.simulator.run(60) connection = FakeConnection(manager, manager2) self.simulator.add_connection(connection) @@ -525,33 +525,33 @@ def test_reorg(self) -> None: class BaseMemoryStorageFeatureSimulationTest(BaseFeatureSimulationTest): - def setUp(self): - super().setUp() - self.builder = self.simulator.get_default_builder() + def get_simulator_builder(self) -> Builder: + return self.simulator.get_default_builder() @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') class BaseRocksDBStorageFeatureSimulationTest(BaseFeatureSimulationTest): - def setUp(self): - super().setUp() + def get_rocksdb_directory(self) -> str: import tempfile + tmp_dir = tempfile.mkdtemp() + self.tmpdirs.append(tmp_dir) + return tmp_dir - self.rocksdb_directory = tempfile.mkdtemp() - self.tmpdirs.append(self.rocksdb_directory) - - 
self.builder = self.simulator.get_default_builder() \ - .use_rocksdb(path=self.rocksdb_directory) \ + def get_simulator_builder_from_dir(self, rocksdb_directory: str) -> Builder: + return self.simulator.get_default_builder() \ + .use_rocksdb(path=rocksdb_directory) \ .disable_full_verification() + def get_simulator_builder(self) -> Builder: + rocksdb_directory = self.get_rocksdb_directory() + return self.get_simulator_builder_from_dir(rocksdb_directory) + def test_feature_from_existing_storage(self) -> None: """ Tests that feature states are correctly retrieved from an existing storage, so no recalculation is required. """ - artifacts1 = self.simulator.create_artifacts(self.builder) - manager1 = artifacts1.manager - manager1.allow_mining_without_peers() - feature_settings = FeatureSettings( + enable_usage=True, evaluation_interval=4, max_signal_bits=4, default_threshold=3, @@ -566,31 +566,33 @@ def test_feature_from_existing_storage(self) -> None: } ) - feature_service = FeatureService( - feature_settings=feature_settings, - tx_storage=artifacts1.tx_storage - ) + settings = get_settings()._replace(FEATURE_ACTIVATION=feature_settings) + rocksdb_dir = self.get_rocksdb_directory() + builder1 = self.get_simulator_builder_from_dir(rocksdb_dir).set_settings(settings) + artifacts1 = self.simulator.create_artifacts(builder1) + feature_service1 = artifacts1.feature_service + manager1 = artifacts1.manager + feature_resource = FeatureResource( feature_settings=feature_settings, - feature_service=feature_service, + feature_service=feature_service1, tx_storage=artifacts1.tx_storage ) web_client = StubSite(feature_resource) - miner = self.simulator.create_miner(manager1, hashpower=1e6) - miner.start() - - get_state_mock = Mock(wraps=feature_service.get_state) + calculate_new_state_mock = Mock(wraps=feature_service1._calculate_new_state) get_ancestor_iteratively_mock = Mock(wraps=feature_service_module._get_ancestor_iteratively) with ( - patch.object(FeatureService, 'get_state', 
get_state_mock), + patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock), patch.object(feature_service_module, '_get_ancestor_iteratively', get_ancestor_iteratively_mock) ): assert artifacts1.tx_storage.get_vertices_count() == 3 # genesis vertices in the storage - trigger = StopAfterNMinedBlocks(miner, quantity=64) - self.simulator.run(36000, trigger=trigger) + # we add 64 blocks so the feature becomes active. It would be active by timeout anyway, + # we just set signal bits to conform with the MUST_SIGNAL phase. + add_new_blocks(manager1, 64, signal_bits=0b1) + self.simulator.run(60) result = self._get_result(web_client) assert result == dict( block_height=64, @@ -608,28 +610,22 @@ def test_feature_from_existing_storage(self) -> None: ) ] ) - # feature states have to be calculated for all blocks in evaluation interval boundaries, as this is the - # first run: - assert self._get_state_mock_block_height_calls(get_state_mock) == list(range(64, -4, -4)) + # feature states have to be calculated for all blocks in evaluation interval boundaries, + # down to the first one (after genesis), as this is the first run: + assert min(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 4 # no blocks are voided, so we only use the height index: assert get_ancestor_iteratively_mock.call_count == 0 assert artifacts1.tx_storage.get_vertices_count() == 67 - get_state_mock.reset_mock() + calculate_new_state_mock.reset_mock() - miner.stop() manager1.stop() artifacts1.rocksdb_storage.close() - builder = self.simulator.get_default_builder() \ - .use_rocksdb(path=self.rocksdb_directory) \ - .disable_full_verification() - artifacts2 = self.simulator.create_artifacts(builder) + # new builder is created with the same storage from the previous manager + builder2 = self.get_simulator_builder_from_dir(rocksdb_dir).set_settings(settings) + artifacts2 = self.simulator.create_artifacts(builder2) + feature_service = artifacts2.feature_service - # 
new feature_service is created with the same storage generated above - feature_service = FeatureService( - feature_settings=feature_settings, - tx_storage=artifacts2.tx_storage - ) feature_resource = FeatureResource( feature_settings=feature_settings, feature_service=feature_service, @@ -637,19 +633,20 @@ def test_feature_from_existing_storage(self) -> None: ) web_client = StubSite(feature_resource) - get_state_mock = Mock(wraps=feature_service.get_state) + calculate_new_state_mock = Mock(wraps=feature_service._calculate_new_state) get_ancestor_iteratively_mock = Mock(wraps=feature_service_module._get_ancestor_iteratively) with ( - patch.object(FeatureService, 'get_state', get_state_mock), + patch.object(FeatureService, '_calculate_new_state', calculate_new_state_mock), patch.object(feature_service_module, '_get_ancestor_iteratively', get_ancestor_iteratively_mock) ): # the new storage starts populated assert artifacts2.tx_storage.get_vertices_count() == 67 - self.simulator.run(3600) + self.simulator.run(60) result = self._get_result(web_client) + # the result should be the same as before assert result == dict( block_height=64, features=[ @@ -666,11 +663,11 @@ def test_feature_from_existing_storage(self) -> None: ) ] ) - # features states are not queried for previous blocks, as they have it cached: - assert self._get_state_mock_block_height_calls(get_state_mock) == [64] + # features states are not calculate for any block, as they're all saved: + assert len(self._calculate_new_state_mock_block_height_calls(calculate_new_state_mock)) == 0 assert get_ancestor_iteratively_mock.call_count == 0 assert artifacts2.tx_storage.get_vertices_count() == 67 - get_state_mock.reset_mock() + calculate_new_state_mock.reset_mock() class SyncV1MemoryStorageFeatureSimulationTest(unittest.SyncV1Params, BaseMemoryStorageFeatureSimulationTest): diff --git a/tests/others/test_cli_builder.py b/tests/others/test_cli_builder.py index c2705b032..3aabf4b3d 100644 --- 
a/tests/others/test_cli_builder.py +++ b/tests/others/test_cli_builder.py @@ -57,9 +57,8 @@ def test_all_default(self): self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) self.assertIsNone(manager.wallet) self.assertEqual('unittests', manager.network) - self.assertNotIn(SyncVersion.V1, manager.connections._sync_factories) - self.assertIn(SyncVersion.V1_1, manager.connections._sync_factories) - self.assertNotIn(SyncVersion.V2, manager.connections._sync_factories) + self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) + self.assertFalse(manager.connections.is_sync_version_enabled(SyncVersion.V2)) self.assertFalse(self.resources_builder._built_prometheus) self.assertFalse(self.resources_builder._built_status) self.assertFalse(manager._enable_event_queue) @@ -102,23 +101,15 @@ def test_memory_storage(self): def test_memory_storage_with_rocksdb_indexes(self): self._build_with_error(['--memory-storage', '--x-rocksdb-indexes'], 'RocksDB indexes require RocksDB data') - def test_sync_v1_0_legacy(self): - manager = self._build(['--memory-storage', '--x-enable-legacy-sync-v1_0']) - self.assertIn(SyncVersion.V1, manager.connections._sync_factories) - self.assertIn(SyncVersion.V1_1, manager.connections._sync_factories) - self.assertNotIn(SyncVersion.V2, manager.connections._sync_factories) - def test_sync_bridge(self): manager = self._build(['--memory-storage', '--x-sync-bridge']) - self.assertNotIn(SyncVersion.V1, manager.connections._sync_factories) - self.assertIn(SyncVersion.V1_1, manager.connections._sync_factories) - self.assertIn(SyncVersion.V2, manager.connections._sync_factories) + self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) + self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V2)) def test_sync_v2_only(self): manager = self._build(['--memory-storage', '--x-sync-v2-only']) - self.assertNotIn(SyncVersion.V1_1, manager.connections._sync_factories) - 
self.assertNotIn(SyncVersion.V1, manager.connections._sync_factories) - self.assertIn(SyncVersion.V2, manager.connections._sync_factories) + self.assertFalse(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) + self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V2)) def test_keypair_wallet(self): manager = self._build(['--memory-storage', '--wallet', 'keypair']) diff --git a/tests/others/test_init_manager.py b/tests/others/test_init_manager.py index 76d1d2f2e..8ca7228a2 100644 --- a/tests/others/test_init_manager.py +++ b/tests/others/test_init_manager.py @@ -2,17 +2,12 @@ from hathor.conf import HathorSettings from hathor.pubsub import PubSubManager +from hathor.simulator.utils import add_new_block, add_new_blocks from hathor.transaction import BaseTransaction from hathor.transaction.storage import TransactionMemoryStorage from tests import unittest from tests.unittest import TestBuilder -from tests.utils import ( - add_blocks_unlock_reward, - add_new_block, - add_new_blocks, - add_new_double_spending, - add_new_transactions, -) +from tests.utils import add_blocks_unlock_reward, add_new_double_spending, add_new_transactions settings = HathorSettings() @@ -55,7 +50,6 @@ def test_invalid_arguments(self): builder = TestBuilder() builder.set_tx_storage(self.tx_storage) builder.disable_sync_v1() - builder.disable_sync_v1_1() builder.disable_sync_v2() builder.build() diff --git a/tests/others/test_metrics.py b/tests/others/test_metrics.py index d149478cb..41c4ddb25 100644 --- a/tests/others/test_metrics.py +++ b/tests/others/test_metrics.py @@ -7,10 +7,11 @@ from hathor.p2p.peer_id import PeerId from hathor.p2p.protocol import HathorProtocol from hathor.pubsub import HathorEvents +from hathor.simulator.utils import add_new_blocks from hathor.transaction.storage import TransactionCacheStorage, TransactionMemoryStorage from hathor.wallet import Wallet from tests import unittest -from tests.utils import HAS_ROCKSDB, add_new_blocks +from 
tests.utils import HAS_ROCKSDB class BaseMetricsTest(unittest.TestCase): diff --git a/tests/p2p/test_double_spending.py b/tests/p2p/test_double_spending.py index 02c4b7441..9eb408ee2 100644 --- a/tests/p2p/test_double_spending.py +++ b/tests/p2p/test_double_spending.py @@ -1,6 +1,7 @@ from hathor.crypto.util import decode_address +from hathor.simulator.utils import add_new_blocks from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_blocks, add_new_tx +from tests.utils import add_blocks_unlock_reward, add_new_tx class BaseHathorSyncMethodsTestCase(unittest.TestCase): @@ -42,19 +43,19 @@ def test_simple_double_spending(self): tx1.weight = 10 tx1.parents = self.manager1.get_new_tx_parents() tx1.timestamp = int(self.clock.seconds()) - tx1.resolve() + self.manager1.cpu_mining_service.resolve(tx1) tx2 = Transaction.create_from_struct(tx1.get_struct()) tx2.weight = 10 tx2.parents = tx2.parents[::-1] tx2.timestamp = int(self.clock.seconds()) - tx2.resolve() + self.manager1.cpu_mining_service.resolve(tx2) self.assertNotEqual(tx1.hash, tx2.hash) tx3 = Transaction.create_from_struct(tx1.get_struct()) tx3.weight = 11 tx3.timestamp = int(self.clock.seconds()) - tx3.resolve() + self.manager1.cpu_mining_service.resolve(tx3) self.assertNotEqual(tx1.hash, tx3.hash) self.assertNotEqual(tx2.hash, tx3.hash) @@ -156,7 +157,7 @@ def test_double_spending_propagation(self): tx1.weight = 5 tx1.parents = self.manager1.get_new_tx_parents() tx1.timestamp = int(self.clock.seconds()) - tx1.resolve() + self.manager1.cpu_mining_service.resolve(tx1) address = self.manager1.wallet.get_unused_address_bytes() value = 500 @@ -170,7 +171,7 @@ def test_double_spending_propagation(self): tx4.weight = 5 tx4.parents = self.manager1.get_new_tx_parents() tx4.timestamp = int(self.clock.seconds()) - tx4.resolve() + self.manager1.cpu_mining_service.resolve(tx4) self.assertEqual(tx1.inputs[0].tx_id, tx4.inputs[0].tx_id) self.assertEqual(tx1.inputs[0].index, tx4.inputs[0].index) 
@@ -193,7 +194,7 @@ def test_double_spending_propagation(self): tx2.weight = 5 tx2.parents = tx1.parents tx2.timestamp = int(self.clock.seconds()) - tx2.resolve() + self.manager1.cpu_mining_service.resolve(tx2) self.clock.advance(15) self.manager1.propagate_tx(tx2) self.clock.advance(15) @@ -212,7 +213,7 @@ def test_double_spending_propagation(self): tx3.weight = 5 tx3.parents = [tx1.hash, tx1.parents[0]] tx3.timestamp = int(self.clock.seconds()) - tx3.resolve() + self.manager1.cpu_mining_service.resolve(tx3) self.clock.advance(15) self.assertTrue(self.manager1.propagate_tx(tx3)) self.clock.advance(15) @@ -243,7 +244,7 @@ def test_double_spending_propagation(self): tx5.weight = 5 tx5.parents = tx1.parents tx5.timestamp = int(self.clock.seconds()) - tx5.resolve() + self.manager1.cpu_mining_service.resolve(tx5) self.clock.advance(15) self.manager1.propagate_tx(tx5) self.clock.advance(15) @@ -259,7 +260,7 @@ def test_double_spending_propagation(self): tx6.weight = 1 tx6.parents = [tx4.hash, tx5.hash] tx6.timestamp = int(self.clock.seconds()) - tx6.resolve() + self.manager1.cpu_mining_service.resolve(tx6) self.clock.advance(15) self.manager1.propagate_tx(tx6) self.clock.advance(15) @@ -280,7 +281,7 @@ def test_double_spending_propagation(self): tx7.weight = 10 tx7.parents = [tx4.hash, tx5.hash] tx7.timestamp = int(self.clock.seconds()) - tx7.resolve() + self.manager1.cpu_mining_service.resolve(tx7) self.clock.advance(15) self.manager1.propagate_tx(tx7, False) self.clock.advance(15) diff --git a/tests/p2p/test_get_best_blockchain.py b/tests/p2p/test_get_best_blockchain.py index 806444be0..11e71db34 100644 --- a/tests/p2p/test_get_best_blockchain.py +++ b/tests/p2p/test_get_best_blockchain.py @@ -18,6 +18,8 @@ class BaseGetBestBlockchainTestCase(SimulatorTestCase): + seed_config = 6 + def _send_cmd(self, proto, cmd, payload=None): if not payload: line = '{}\r\n'.format(cmd) diff --git a/tests/p2p/test_peer_id.py b/tests/p2p/test_peer_id.py index 8cb20dca8..b9add5faa 
100644 --- a/tests/p2p/test_peer_id.py +++ b/tests/p2p/test_peer_id.py @@ -2,8 +2,6 @@ import shutil import tempfile -from twisted.internet.defer import inlineCallbacks - from hathor.conf import HathorSettings from hathor.p2p.peer_id import InvalidPeerIdException, PeerId from hathor.p2p.peer_storage import PeerStorage @@ -212,8 +210,7 @@ def test_retry_logic(self): class BasePeerIdTest(unittest.TestCase): __test__ = False - @inlineCallbacks - def test_validate_entrypoint(self): + async def test_validate_entrypoint(self): manager = self.create_peer('testnet', unlock_wallet=False) peer_id = manager.my_peer peer_id.entrypoints = ['tcp://127.0.0.1:40403'] @@ -221,15 +218,15 @@ def test_validate_entrypoint(self): # we consider that we are starting the connection to the peer protocol = manager.connections.client_factory.buildProtocol('127.0.0.1') protocol.connection_string = 'tcp://127.0.0.1:40403' - result = yield peer_id.validate_entrypoint(protocol) + result = await peer_id.validate_entrypoint(protocol) self.assertTrue(result) # if entrypoint is an URI peer_id.entrypoints = ['uri_name'] - result = yield peer_id.validate_entrypoint(protocol) + result = await peer_id.validate_entrypoint(protocol) self.assertTrue(result) # test invalid. DNS in test mode will resolve to '127.0.0.1:40403' protocol.connection_string = 'tcp://45.45.45.45:40403' - result = yield peer_id.validate_entrypoint(protocol) + result = await peer_id.validate_entrypoint(protocol) self.assertFalse(result) # now test when receiving the connection - i.e. 
the peer starts it @@ -242,11 +239,11 @@ def getPeer(self): Peer = namedtuple('Peer', 'host') return Peer(host='127.0.0.1') protocol.transport = FakeTransport() - result = yield peer_id.validate_entrypoint(protocol) + result = await peer_id.validate_entrypoint(protocol) self.assertTrue(result) # if entrypoint is an URI peer_id.entrypoints = ['uri_name'] - result = yield peer_id.validate_entrypoint(protocol) + result = await peer_id.validate_entrypoint(protocol) self.assertTrue(result) diff --git a/tests/p2p/test_protocol.py b/tests/p2p/test_protocol.py index ae2a10a75..0cf572ec6 100644 --- a/tests/p2p/test_protocol.py +++ b/tests/p2p/test_protocol.py @@ -411,18 +411,6 @@ def test_two_connections(self): self.assertAndStepConn(self.conn, b'^GET-TIPS') self.assertAndStepConn(self.conn, b'^PING') - for _ in range(20): - self.assertAndStepConn(self.conn, b'^GET-BEST-BLOCKCHAIN') - - self.assertAndStepConn(self.conn, b'^GET-PEERS', b'^GET-PEERS') - self.assertAndStepConn(self.conn, b'^GET-BEST-BLOCKCHAIN', b'^GET-BEST-BLOCKCHAIN') - self.assertAndStepConn(self.conn, b'^GET-PEERS', b'^GET-PEERS') - self.assertAndStepConn(self.conn, b'^PEERS', b'^GET-BEST-BLOCKCHAIN') - self.assertAndStepConn(self.conn, b'^GET-BEST-BLOCKCHAIN', b'^TIPS') - self.assertAndStepConn(self.conn, b'^TIPS', b'^TIPS') - self.assertAndStepConn(self.conn, b'^TIPS', b'^TIPS-END') - self.assertAndStepConn(self.conn, b'^TIPS-END', b'^PONG') - self.assertAndStepConn(self.conn, b'^PONG', b'^BEST-BLOCKCHAIN') self.assertIsConnected() @inlineCallbacks @@ -437,7 +425,11 @@ def test_get_data(self): self.assertAndStepConn(self.conn, b'^RELAY') self.assertIsConnected() missing_tx = '00000000228dfcd5dec1c9c6263f6430a5b4316bb9e3decb9441a6414bfd8697' - payload = {'until_first_block': missing_tx, 'start_from': [settings.GENESIS_BLOCK_HASH.hex()]} + payload = { + 'first_block_hash': missing_tx, + 'last_block_hash': missing_tx, + 'start_from': [settings.GENESIS_BLOCK_HASH.hex()] + } yield 
self._send_cmd(self.conn.proto1, 'GET-TRANSACTIONS-BFS', json_dumps(payload)) self._check_result_only_cmd(self.conn.peek_tr1_value(), b'NOT-FOUND') self.conn.run_one_step() diff --git a/tests/p2p/test_split_brain.py b/tests/p2p/test_split_brain.py index 804377f99..68ee24609 100644 --- a/tests/p2p/test_split_brain.py +++ b/tests/p2p/test_split_brain.py @@ -1,12 +1,13 @@ import pytest from mnemonic import Mnemonic -from hathor.daa import TestMode, _set_test_mode +from hathor.daa import TestMode from hathor.graphviz import GraphvizVisualizer from hathor.simulator import FakeConnection +from hathor.simulator.utils import add_new_block from hathor.wallet import HDWallet from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_block, add_new_double_spending, add_new_transactions +from tests.utils import add_blocks_unlock_reward, add_new_double_spending, add_new_transactions class BaseHathorSyncMethodsTestCase(unittest.TestCase): @@ -24,8 +25,8 @@ def create_peer(self, network, unlock_wallet=True): wallet = HDWallet(gap_limit=2) wallet._manually_initialize() - _set_test_mode(TestMode.TEST_ALL_WEIGHT) manager = super().create_peer(network, wallet=wallet) + manager.daa.TEST_MODE = TestMode.TEST_ALL_WEIGHT manager.avg_time_between_blocks = 64 # Don't use it anywhere else. It is unsafe to generate mnemonic words like this. 
@@ -293,7 +294,7 @@ def test_split_brain_only_blocks_bigger_score(self): # will be bigger than the other one b = add_new_block(manager2, advance_clock=1, propagate=False) b.weight = 5 - b.resolve() + manager2.cpu_mining_service.resolve(b) manager2.propagate_tx(b) manager2_blocks += 1 diff --git a/tests/p2p/test_sync.py b/tests/p2p/test_sync.py index ae8af2bb6..bad0f654f 100644 --- a/tests/p2p/test_sync.py +++ b/tests/p2p/test_sync.py @@ -43,7 +43,7 @@ def _add_new_tx(self, address, value): tx.storage = self.manager1.tx_storage tx.weight = 10 tx.parents = self.manager1.get_new_tx_parents() - tx.resolve() + self.manager1.cpu_mining_service.resolve(tx) self.manager1.verification_service.verify(tx) self.manager1.propagate_tx(tx) self.clock.advance(10) @@ -60,7 +60,7 @@ def _add_new_transactions(self, num_txs): def _add_new_block(self, propagate=True): block = self.manager1.generate_mining_block() - self.assertTrue(block.resolve()) + self.assertTrue(self.manager1.cpu_mining_service.resolve(block)) self.manager1.verification_service.verify(block) self.manager1.on_new_tx(block, propagate_to_peers=propagate) self.clock.advance(10) @@ -268,7 +268,7 @@ def test_downloader(self): self.assertTrue(isinstance(conn.proto1.state, PeerIdState)) self.assertTrue(isinstance(conn.proto2.state, PeerIdState)) - downloader = conn.proto2.connections._sync_factories[SyncVersion.V1_1].get_downloader() + downloader = conn.proto2.connections.get_sync_factory(SyncVersion.V1_1).get_downloader() node_sync1 = NodeSyncTimestamp(conn.proto1, downloader, reactor=conn.proto1.node.reactor) node_sync1.start() @@ -361,7 +361,7 @@ def _downloader_bug_setup(self): # create the peer that will experience the bug self.manager_bug = self.create_peer(self.network) - self.downloader = self.manager_bug.connections._sync_factories[SyncVersion.V1_1].get_downloader() + self.downloader = self.manager_bug.connections.get_sync_factory(SyncVersion.V1_1).get_downloader() self.downloader.window_size = 1 self.conn1 = 
FakeConnection(self.manager_bug, self.manager1) self.conn2 = FakeConnection(self.manager_bug, self.manager2) @@ -503,9 +503,9 @@ def test_sync_metadata(self): # check they have the same consensus node_sync1 = conn.proto1.state.sync_agent node_sync2 = conn.proto2.state.sync_agent - self.assertEqual(node_sync1.peer_height, height) - self.assertEqual(node_sync1.synced_height, height) - self.assertEqual(node_sync2.peer_height, height) + self.assertEqual(node_sync1.peer_best_block.height, height) + self.assertEqual(node_sync1.synced_block.height, height) + self.assertEqual(node_sync2.peer_best_block.height, height) # 3 genesis + blocks + 8 txs self.assertEqual(self.manager1.tx_storage.get_vertices_count(), height + 11) self.assertEqual(manager2.tx_storage.get_vertices_count(), height + 11) @@ -527,14 +527,14 @@ def test_tx_propagation_nat_peers(self): node_sync1 = self.conn1.proto1.state.sync_agent self.assertEqual(self.manager1.tx_storage.latest_timestamp, self.manager2.tx_storage.latest_timestamp) - self.assertEqual(node_sync1.peer_height, node_sync1.synced_height) - self.assertEqual(node_sync1.peer_height, self.manager1.tx_storage.get_height_best_block()) + self.assertEqual(node_sync1.peer_best_block, node_sync1.synced_block) + self.assertEqual(node_sync1.peer_best_block.height, self.manager1.tx_storage.get_height_best_block()) self.assertConsensusEqual(self.manager1, self.manager2) node_sync2 = self.conn2.proto1.state.sync_agent self.assertEqual(self.manager2.tx_storage.latest_timestamp, self.manager3.tx_storage.latest_timestamp) - self.assertEqual(node_sync2.peer_height, node_sync2.synced_height) - self.assertEqual(node_sync2.peer_height, self.manager2.tx_storage.get_height_best_block()) + self.assertEqual(node_sync2.peer_best_block, node_sync2.synced_block) + self.assertEqual(node_sync2.peer_best_block.height, self.manager2.tx_storage.get_height_best_block()) self.assertConsensusEqual(self.manager2, self.manager3) def test_block_sync_new_blocks_and_txs(self): @@ 
-560,8 +560,8 @@ def test_block_sync_new_blocks_and_txs(self): node_sync = conn.proto1.state.sync_agent self.assertEqual(self.manager1.tx_storage.latest_timestamp, manager2.tx_storage.latest_timestamp) - self.assertEqual(node_sync.peer_height, node_sync.synced_height) - self.assertEqual(node_sync.peer_height, self.manager1.tx_storage.get_height_best_block()) + self.assertEqual(node_sync.peer_best_block, node_sync.synced_block) + self.assertEqual(node_sync.peer_best_block.height, self.manager1.tx_storage.get_height_best_block()) self.assertConsensusEqual(self.manager1, manager2) self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) @@ -581,8 +581,8 @@ def test_block_sync_many_new_blocks(self): self.clock.advance(1) node_sync = conn.proto1.state.sync_agent - self.assertEqual(node_sync.peer_height, node_sync.synced_height) - self.assertEqual(node_sync.peer_height, self.manager1.tx_storage.get_height_best_block()) + self.assertEqual(node_sync.peer_best_block, node_sync.synced_block) + self.assertEqual(node_sync.peer_best_block.height, self.manager1.tx_storage.get_height_best_block()) self.assertConsensusEqual(self.manager1, manager2) self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) @@ -602,8 +602,8 @@ def test_block_sync_new_blocks(self): self.clock.advance(1) node_sync = conn.proto1.state.sync_agent - self.assertEqual(node_sync.peer_height, node_sync.synced_height) - self.assertEqual(node_sync.peer_height, self.manager1.tx_storage.get_height_best_block()) + self.assertEqual(node_sync.peer_best_block, node_sync.synced_block) + self.assertEqual(node_sync.peer_best_block.height, self.manager1.tx_storage.get_height_best_block()) self.assertConsensusEqual(self.manager1, manager2) self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) @@ -664,9 +664,9 @@ def test_full_sync(self): node_sync1 = conn.proto1.state.sync_agent node_sync2 = conn.proto2.state.sync_agent - 
self.assertEqual(node_sync1.peer_height, common_height) - self.assertEqual(node_sync1.synced_height, common_height) - self.assertEqual(node_sync2.peer_height, common_height) + self.assertEqual(node_sync1.peer_best_block.height, common_height) + self.assertEqual(node_sync1.synced_block.height, common_height) + self.assertEqual(node_sync2.peer_best_block.height, common_height) self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) self.assertConsensusEqual(self.manager1, manager2) @@ -715,9 +715,9 @@ def test_block_sync_checkpoints(self): node_sync1 = conn.proto1.state.sync_agent node_sync2 = conn.proto2.state.sync_agent - self.assertEqual(node_sync1.peer_height, TOTAL_BLOCKS) - self.assertEqual(node_sync1.synced_height, TOTAL_BLOCKS) - self.assertEqual(node_sync2.peer_height, len(blocks)) + self.assertEqual(node_sync1.peer_best_block.height, TOTAL_BLOCKS) + self.assertEqual(node_sync1.synced_block.height, TOTAL_BLOCKS) + self.assertEqual(node_sync2.peer_best_block.height, len(blocks)) self.assertConsensusValid(self.manager1) self.assertConsensusValid(manager2) @@ -738,8 +738,8 @@ def test_block_sync_only_genesis(self): self.clock.advance(1) node_sync = conn.proto1.state.sync_agent - self.assertEqual(node_sync.synced_height, 0) - self.assertEqual(node_sync.peer_height, 0) + self.assertEqual(node_sync.synced_block.height, 0) + self.assertEqual(node_sync.peer_best_block.height, 0) self.assertEqual(self.manager1.tx_storage.get_vertices_count(), 3) self.assertEqual(manager2.tx_storage.get_vertices_count(), 3) diff --git a/tests/p2p/test_sync_enabled.py b/tests/p2p/test_sync_enabled.py index a62f9c041..a352c08a0 100644 --- a/tests/p2p/test_sync_enabled.py +++ b/tests/p2p/test_sync_enabled.py @@ -52,13 +52,20 @@ def test_sync_rotate(self): self.simulator.run(600) + ready = set(conn for conn in connections if conn.proto1.is_state(conn.proto1.PeerState.READY)) + self.assertEqual(len(ready), len(other_managers)) + enabled = set(conn for conn in 
connections if conn.proto1.is_sync_enabled()) - self.assertTrue(len(enabled), 3) + self.assertEqual(len(enabled), 3) manager1.connections._sync_rotate_if_needed(force=True) enabled2 = set(conn for conn in connections if conn.proto1.is_sync_enabled()) - self.assertTrue(len(enabled2), 3) - # Chance of false positive: 1/comb(20, 3) = 0.0008771929824561404 + self.assertEqual(len(enabled2), 3) + if enabled == enabled2: + manager1.connections._sync_rotate_if_needed(force=True) + enabled2 = set(conn for conn in connections if conn.proto1.is_sync_enabled()) + self.assertEqual(len(enabled2), 3) + # Chance of false positive: (1/comb(15, 3))**2 = 0.00000483 self.assertNotEqual(enabled, enabled2) diff --git a/tests/p2p/test_sync_mempool.py b/tests/p2p/test_sync_mempool.py index ac4fc9bb1..f2a0219b3 100644 --- a/tests/p2p/test_sync_mempool.py +++ b/tests/p2p/test_sync_mempool.py @@ -31,7 +31,7 @@ def _add_new_tx(self, address, value): tx.storage = self.manager1.tx_storage tx.weight = 10 tx.parents = self.manager1.get_new_tx_parents() - tx.resolve() + self.manager1.cpu_mining_service.resolve(tx) self.manager1.verification_service.verify(tx) self.manager1.propagate_tx(tx) self.clock.advance(10) @@ -48,7 +48,7 @@ def _add_new_transactions(self, num_txs): def _add_new_block(self, propagate=True): block = self.manager1.generate_mining_block() - self.assertTrue(block.resolve()) + self.assertTrue(self.manager1.cpu_mining_service.resolve(block)) self.manager1.verification_service.verify(block) self.manager1.on_new_tx(block, propagate_to_peers=propagate) self.clock.advance(10) diff --git a/tests/p2p/test_sync_v2.py b/tests/p2p/test_sync_v2.py index 0a9ef50bc..e393431c3 100644 --- a/tests/p2p/test_sync_v2.py +++ b/tests/p2p/test_sync_v2.py @@ -1,10 +1,22 @@ +import base64 +import re + import pytest +from twisted.internet.defer import inlineCallbacks, succeed from twisted.python.failure import Failure from hathor.conf import HathorSettings +from hathor.p2p.messages import ProtocolMessages 
from hathor.p2p.peer_id import PeerId +from hathor.p2p.sync_v2.agent import _HeightInfo from hathor.simulator import FakeConnection -from hathor.simulator.trigger import StopAfterNMinedBlocks, StopAfterNTransactions, StopWhenTrue, Trigger +from hathor.simulator.trigger import ( + StopAfterNMinedBlocks, + StopAfterNTransactions, + StopWhenSendLineMatch, + StopWhenTrue, + Trigger, +) from hathor.transaction.storage.traversal import DFSWalk from tests.simulation.base import SimulatorTestCase from tests.utils import HAS_ROCKSDB @@ -15,6 +27,8 @@ class BaseRandomSimulatorTestCase(SimulatorTestCase): __test__ = True + seed_config = 2 + def _get_partial_blocks(self, tx_storage): with tx_storage.allow_partially_validated_context(): partial_blocks = set() @@ -68,9 +82,6 @@ def _run_restart_test(self, *, full_verification: bool, use_tx_storage_cache: bo self.assertNotEqual(b1.hash, b2.hash) - partial_blocks = self._get_partial_blocks(manager2.tx_storage) - self.assertGreater(len(partial_blocks), 0) - for _ in range(20): print() print('Stopping manager2...') @@ -106,8 +117,6 @@ def _run_restart_test(self, *, full_verification: bool, use_tx_storage_cache: bo builder3.use_tx_storage_cache() manager3 = self.simulator.create_peer(builder3) - self.assertEqual(partial_blocks, self._get_partial_blocks(manager3.tx_storage)) - self.assertTrue(manager3.tx_storage.indexes.deps.has_needed_tx()) conn13 = FakeConnection(manager1, manager3, latency=0.05) self.simulator.add_connection(conn13) @@ -220,19 +229,21 @@ def test_exceeds_streaming_and_mempool_limits(self) -> None: # Let the connection start to sync. self.simulator.run(1) + new_streaming_limit = 30 + # Change manager1 default streaming and mempool limits. 
sync1 = conn12.proto1.state.sync_agent - sync1.DEFAULT_STREAMING_LIMIT = 30 - sync1.mempool_manager.MAX_STACK_LENGTH = 30 - self.assertIsNone(sync1.blockchain_streaming) - self.assertIsNone(sync1.transactions_streaming) + sync1.DEFAULT_STREAMING_LIMIT = new_streaming_limit + sync1.mempool_manager.MAX_STACK_LENGTH = new_streaming_limit + self.assertIsNone(sync1._blk_streaming_server) + self.assertIsNone(sync1._tx_streaming_server) # Change manager2 default streaming and mempool limits. sync2 = conn12.proto2.state.sync_agent - sync2.DEFAULT_STREAMING_LIMIT = 50 - sync2.mempool_manager.MAX_STACK_LENGTH = 50 - self.assertIsNone(sync2.blockchain_streaming) - self.assertIsNone(sync2.transactions_streaming) + sync2.DEFAULT_STREAMING_LIMIT = new_streaming_limit + sync2.mempool_manager.MAX_STACK_LENGTH = new_streaming_limit + self.assertIsNone(sync2._blk_streaming_server) + self.assertIsNone(sync2._tx_streaming_server) # Run until fully synced. # trigger = StopWhenTrue(sync2.is_synced) @@ -241,3 +252,129 @@ def test_exceeds_streaming_and_mempool_limits(self) -> None: self.assertEqual(manager1.tx_storage.get_vertices_count(), manager2.tx_storage.get_vertices_count()) self.assertConsensusEqualSyncV2(manager1, manager2) + + def _prepare_sync_v2_find_best_common_block_reorg(self): + manager1 = self.create_peer(enable_sync_v1=False, enable_sync_v2=True) + manager1.allow_mining_without_peers() + miner1 = self.simulator.create_miner(manager1, hashpower=10e6) + miner1.start() + self.assertTrue(self.simulator.run(24 * 3600)) + miner1.stop() + + manager2 = self.create_peer(enable_sync_v1=False, enable_sync_v2=True) + conn12 = FakeConnection(manager1, manager2, latency=0.05) + self.simulator.add_connection(conn12) + + self.assertTrue(self.simulator.run(3600)) + return conn12 + + @inlineCallbacks + def test_sync_v2_find_best_common_block_reorg_1(self): + conn12 = self._prepare_sync_v2_find_best_common_block_reorg() + sync_agent = conn12._proto1.state.sync_agent + rng = 
conn12.manager2.rng + + my_best_block = sync_agent.get_my_best_block() + peer_best_block = sync_agent.peer_best_block + + fake_peer_best_block = _HeightInfo(my_best_block.height + 3, rng.randbytes(32)) + reorg_height = peer_best_block.height - 50 + + def fake_get_peer_block_hashes(heights): + # return empty as soon as the search lowest height is not the genesis + if heights[0] != 0: + return [] + + # simulate a reorg + response = [] + for h in heights: + if h < reorg_height: + vertex_id = conn12.manager2.tx_storage.indexes.height.get(h) + else: + vertex_id = rng.randbytes(32) + response.append(_HeightInfo(height=h, id=vertex_id)) + return succeed(response) + + sync_agent.get_peer_block_hashes = fake_get_peer_block_hashes + common_block_info = yield sync_agent.find_best_common_block(my_best_block, fake_peer_best_block) + self.assertIsNone(common_block_info) + + @inlineCallbacks + def test_sync_v2_find_best_common_block_reorg_2(self): + conn12 = self._prepare_sync_v2_find_best_common_block_reorg() + sync_agent = conn12._proto1.state.sync_agent + rng = conn12.manager2.rng + + my_best_block = sync_agent.get_my_best_block() + peer_best_block = sync_agent.peer_best_block + + fake_peer_best_block = _HeightInfo(my_best_block.height + 3, rng.randbytes(32)) + reorg_height = peer_best_block.height - 50 + + def fake_get_peer_block_hashes(heights): + if heights[0] != 0: + return succeed([ + _HeightInfo(height=h, id=rng.randbytes(32)) + for h in heights + ]) + + # simulate a reorg + response = [] + for h in heights: + if h < reorg_height: + vertex_id = conn12.manager2.tx_storage.indexes.height.get(h) + else: + vertex_id = rng.randbytes(32) + response.append(_HeightInfo(height=h, id=vertex_id)) + return succeed(response) + + sync_agent.get_peer_block_hashes = fake_get_peer_block_hashes + common_block_info = yield sync_agent.find_best_common_block(my_best_block, fake_peer_best_block) + self.assertIsNone(common_block_info) + + def test_multiple_unexpected_txs(self) -> None: + 
manager1 = self.create_peer(enable_sync_v1=False, enable_sync_v2=True) + manager1.allow_mining_without_peers() + + # mine some blocks (100, could be any amount) + miner1 = self.simulator.create_miner(manager1, hashpower=10e6) + miner1.start() + self.assertTrue(self.simulator.run(3 * 3600, trigger=StopAfterNMinedBlocks(miner1, quantity=100))) + miner1.stop() + + # generate some transactions (10, could be any amount >1) + gen_tx1 = self.simulator.create_tx_generator(manager1, rate=3., hashpower=10e9, ignore_no_funds=True) + gen_tx1.start() + self.assertTrue(self.simulator.run(3 * 3600, trigger=StopAfterNTransactions(gen_tx1, quantity=10))) + gen_tx1.stop() + + # mine some blocks (2 to be sure, 1 should be enough) + miner1.start() + self.assertTrue(self.simulator.run(3 * 3600, trigger=StopAfterNMinedBlocks(miner1, quantity=2))) + miner1.stop() + + # create a new peer and run sync and stop when it requests transactions, so we can inject it with invalid ones + manager2 = self.create_peer(enable_sync_v1=False, enable_sync_v2=True) + conn12 = FakeConnection(manager1, manager2, latency=0.05) + self.simulator.add_connection(conn12) + regex = re.compile(rf'{ProtocolMessages.GET_TRANSACTIONS_BFS.value} '.encode('ascii')) + self.assertTrue(self.simulator.run(2 * 60, trigger=StopWhenSendLineMatch(conn12._proto2, regex))) + + # make up some transactions that the node isn't expecting + best_block = manager1.tx_storage.get_best_block() + existing_tx = manager1.tx_storage.get_transaction(list(best_block.get_tx_parents())[0]) + fake_txs = [] + for i in range(3): + fake_tx = existing_tx.clone() + fake_tx.timestamp += 1 + i # incrementally add timestamp so something is guaranteed to change + manager1.cpu_mining_service.resolve(fake_tx) + fake_txs.append(fake_tx) + + # send fake transactions to manager2, before the fix the first should fail with no issue, but the second would + # end up on an AlreadyCalledError because the deferred.errback will be called twice + for fake_tx in fake_txs: 
+ sync_node2 = conn12.proto2.state.sync_agent + sync_node2.handle_transaction(base64.b64encode(fake_tx.get_struct()).decode()) + + # force the processing of async code, nothing should break + self.simulator.run(0) diff --git a/tests/p2p/test_twin_tx.py b/tests/p2p/test_twin_tx.py index e326e5756..9e5e8857a 100644 --- a/tests/p2p/test_twin_tx.py +++ b/tests/p2p/test_twin_tx.py @@ -1,8 +1,9 @@ from hathor.crypto.util import decode_address +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction from hathor.wallet.base_wallet import WalletOutputInfo from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_blocks, add_new_double_spending +from tests.utils import add_blocks_unlock_reward, add_new_double_spending class BaseTwinTransactionTestCase(unittest.TestCase): @@ -36,12 +37,12 @@ def test_twin_tx(self): tx1.weight = 10 tx1.parents = self.manager.get_new_tx_parents() tx1.timestamp = int(self.clock.seconds()) - tx1.resolve() + self.manager.cpu_mining_service.resolve(tx1) # Change of parents only, so it's a twin tx2 = Transaction.create_from_struct(tx1.get_struct()) tx2.parents = [tx1.parents[1], tx1.parents[0]] - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) self.assertNotEqual(tx1.hash, tx2.hash) # The same as tx1 but with one input different, so it's not a twin @@ -50,7 +51,7 @@ def test_twin_tx(self): tx3.weight = tx1.weight tx3.parents = tx1.parents tx3.timestamp = tx1.timestamp - tx3.resolve() + self.manager.cpu_mining_service.resolve(tx3) self.manager.propagate_tx(tx1) meta1 = tx1.get_metadata() diff --git a/tests/p2p/test_whitelist.py b/tests/p2p/test_whitelist.py index 7f1b28759..7d408e71b 100644 --- a/tests/p2p/test_whitelist.py +++ b/tests/p2p/test_whitelist.py @@ -14,10 +14,10 @@ def test_sync_v11_whitelist_no_no(self): network = 'testnet' manager1 = self.create_peer(network) - self.assertEqual(set(manager1.connections._sync_factories.keys()), {SyncVersion.V1_1}) + 
self.assertEqual(manager1.connections.get_enabled_sync_versions(), {SyncVersion.V1_1}) manager2 = self.create_peer(network) - self.assertEqual(set(manager2.connections._sync_factories.keys()), {SyncVersion.V1_1}) + self.assertEqual(manager2.connections.get_enabled_sync_versions(), {SyncVersion.V1_1}) conn = FakeConnection(manager1, manager2) self.assertFalse(conn.tr1.disconnecting) @@ -36,10 +36,10 @@ def test_sync_v11_whitelist_yes_no(self): network = 'testnet' manager1 = self.create_peer(network) - self.assertEqual(set(manager1.connections._sync_factories.keys()), {SyncVersion.V1_1}) + self.assertEqual(manager1.connections.get_enabled_sync_versions(), {SyncVersion.V1_1}) manager2 = self.create_peer(network) - self.assertEqual(set(manager2.connections._sync_factories.keys()), {SyncVersion.V1_1}) + self.assertEqual(manager2.connections.get_enabled_sync_versions(), {SyncVersion.V1_1}) manager1.peers_whitelist.append(manager2.my_peer.id) @@ -60,10 +60,10 @@ def test_sync_v11_whitelist_yes_yes(self): network = 'testnet' manager1 = self.create_peer(network) - self.assertEqual(set(manager1.connections._sync_factories.keys()), {SyncVersion.V1_1}) + self.assertEqual(manager1.connections.get_enabled_sync_versions(), {SyncVersion.V1_1}) manager2 = self.create_peer(network) - self.assertEqual(set(manager2.connections._sync_factories.keys()), {SyncVersion.V1_1}) + self.assertEqual(manager2.connections.get_enabled_sync_versions(), {SyncVersion.V1_1}) manager1.peers_whitelist.append(manager2.my_peer.id) manager2.peers_whitelist.append(manager1.my_peer.id) diff --git a/tests/pubsub/test_pubsub.py b/tests/pubsub/test_pubsub.py index fc41fca44..2d3d1ef62 100644 --- a/tests/pubsub/test_pubsub.py +++ b/tests/pubsub/test_pubsub.py @@ -1,47 +1,8 @@ -import threading -import time - -from twisted.internet import threads -from twisted.python import threadable - from hathor.pubsub import HathorEvents, PubSubManager -from hathor.util import reactor -from tests import unittest - - -class 
PubSubTestCase(unittest.TestCase): - def _waitForThread(self): - """ - The reactor's threadpool is only available when the reactor is running, - so to have a sane behavior during the tests we make a dummy - L{threads.deferToThread} call. - """ - # copied from twisted/test/test_threads.py [yan] - return threads.deferToThread(time.sleep, 0) - - def test_pubsub_thread(self): - """ Test pubsub function is always called in reactor thread. - """ - def _on_new_event(*args): - self.assertTrue(threadable.isInIOThread()) - - pubsub = PubSubManager(reactor) - pubsub.subscribe(HathorEvents.NETWORK_NEW_TX_ACCEPTED, _on_new_event) - - def cb(_ignore): - waiter = threading.Event() - - def threadedFunc(): - self.assertFalse(threadable.isInIOThread()) - pubsub.publish(HathorEvents.NETWORK_NEW_TX_ACCEPTED) - waiter.set() - - reactor.callInThread(threadedFunc) - waiter.wait(20) - self.assertTrue(waiter.isSet()) +from tests.unittest import TestCase - return self._waitForThread().addCallback(cb) +class PubSubTestCase(TestCase): def test_duplicate_subscribe(self): def noop(): pass diff --git a/tests/resources/base_resource.py b/tests/resources/base_resource.py index b7aebc16b..ce9a84c07 100644 --- a/tests/resources/base_resource.py +++ b/tests/resources/base_resource.py @@ -2,7 +2,7 @@ from twisted.web import server from twisted.web.test.requesthelper import DummyRequest -from hathor.daa import TestMode, _set_test_mode +from hathor.daa import TestMode from hathor.util import json_dumpb, json_loadb from tests import unittest @@ -19,7 +19,7 @@ def setUp(self, *, utxo_index: bool = False, unlock_wallet: bool = True) -> None unlock_wallet=unlock_wallet ) self.manager.allow_mining_without_peers() - _set_test_mode(TestMode.TEST_ALL_WEIGHT) + self.manager.daa.TEST_MODE = TestMode.TEST_ALL_WEIGHT def tearDown(self): return self.manager.stop() diff --git a/tests/resources/event/test_event.py b/tests/resources/event/test_event.py index 40423d6dc..fbf32240e 100644 --- 
a/tests/resources/event/test_event.py +++ b/tests/resources/event/test_event.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + from unittest.mock import Mock import pytest @@ -44,17 +45,14 @@ def data(): def test_get_events(web, data): response = web.get('event').result result = response.json_value() - expected = { - 'events': [ - {'peer_id': '123', 'id': 0, 'timestamp': 123456.0, 'type': 'VERTEX_METADATA_CHANGED', 'data': data, - 'group_id': None}, - {'peer_id': '123', 'id': 1, 'timestamp': 123456.0, 'type': 'VERTEX_METADATA_CHANGED', 'data': data, - 'group_id': None}, - {'peer_id': '123', 'id': 2, 'timestamp': 123456.0, 'type': 'VERTEX_METADATA_CHANGED', 'data': data, - 'group_id': None} + expected = dict( + latest_event_id=2, + events=[ + dict(id=0, timestamp=123456.0, type='VERTEX_METADATA_CHANGED', data=data, group_id=None), + dict(id=1, timestamp=123456.0, type='VERTEX_METADATA_CHANGED', data=data, group_id=None), + dict(id=2, timestamp=123456.0, type='VERTEX_METADATA_CHANGED', data=data, group_id=None), ], - 'latest_event_id': 2 - } + ) assert result == expected @@ -62,13 +60,12 @@ def test_get_events(web, data): def test_get_events_with_size(web, data): response = web.get('event', {b'size': b'1'}) result = response.result.json_value() - expected = { - 'events': [ - {'peer_id': '123', 'id': 0, 'timestamp': 123456.0, 'type': 'VERTEX_METADATA_CHANGED', 'data': data, - 'group_id': None} + expected = dict( + latest_event_id=2, + events=[ + dict(id=0, timestamp=123456.0, type='VERTEX_METADATA_CHANGED', data=data, group_id=None), ], - 'latest_event_id': 2 - } + ) assert result == expected @@ -76,15 +73,13 @@ def test_get_events_with_size(web, data): def test_get_events_with_last_ack_event_id(web, data): response = web.get('event', {b'last_ack_event_id': b'0'}) result = response.result.json_value() - expected = { - 
'events': [ - {'peer_id': '123', 'id': 1, 'timestamp': 123456.0, 'type': 'VERTEX_METADATA_CHANGED', 'data': data, - 'group_id': None}, - {'peer_id': '123', 'id': 2, 'timestamp': 123456.0, 'type': 'VERTEX_METADATA_CHANGED', 'data': data, - 'group_id': None} + expected = dict( + latest_event_id=2, + events=[ + dict(id=1, timestamp=123456.0, type='VERTEX_METADATA_CHANGED', data=data, group_id=None), + dict(id=2, timestamp=123456.0, type='VERTEX_METADATA_CHANGED', data=data, group_id=None), ], - 'latest_event_id': 2 - } + ) assert result == expected @@ -92,12 +87,11 @@ def test_get_events_with_last_ack_event_id(web, data): def test_get_events_with_size_and_last_ack_event_id(web, data): response = web.get('event', {b'last_ack_event_id': b'0', b'size': b'1'}) result = response.result.json_value() - expected = { - 'events': [ - {'peer_id': '123', 'id': 1, 'timestamp': 123456.0, 'type': 'VERTEX_METADATA_CHANGED', 'data': data, - 'group_id': None}, + expected = dict( + latest_event_id=2, + events=[ + dict(id=1, timestamp=123456.0, type='VERTEX_METADATA_CHANGED', data=data, group_id=None), ], - 'latest_event_id': 2 - } + ) assert result == expected diff --git a/tests/resources/healthcheck/test_healthcheck.py b/tests/resources/healthcheck/test_healthcheck.py new file mode 100644 index 000000000..e40fb2a76 --- /dev/null +++ b/tests/resources/healthcheck/test_healthcheck.py @@ -0,0 +1,169 @@ +from unittest.mock import ANY + +from twisted.internet.defer import inlineCallbacks + +from hathor.healthcheck.resources.healthcheck import HealthcheckResource +from hathor.manager import HathorManager +from hathor.simulator import FakeConnection +from tests import unittest +from tests.resources.base_resource import StubSite, _BaseResourceTest +from tests.utils import add_new_blocks + + +class BaseHealthcheckReadinessTest(_BaseResourceTest._ResourceTest): + __test__ = False + + def setUp(self): + super().setUp() + self.web = StubSite(HealthcheckResource(self.manager)) + + @inlineCallbacks 
+ def test_get_no_recent_activity(self): + """Scenario where the node doesn't have a recent block + """ + response = yield self.web.get('/health') + data = response.json_value() + + self.assertEqual(response.responseCode, 503) + self.assertEqual(data, { + 'status': 'fail', + 'description': ANY, + 'checks': { + 'sync': [{ + 'componentType': 'internal', + 'componentName': 'sync', + 'status': 'fail', + 'output': HathorManager.UnhealthinessReason.NO_RECENT_ACTIVITY, + 'time': ANY + }] + } + }) + + @inlineCallbacks + def test_strict_status_code(self): + """Make sure the 'strict_status_code' parameter is working. + The node should return 200 even if it's not ready. + """ + response = yield self.web.get('/health', {b'strict_status_code': b'1'}) + data = response.json_value() + + self.assertEqual(response.responseCode, 200) + self.assertEqual(data, { + 'status': 'fail', + 'description': ANY, + 'checks': { + 'sync': [{ + 'componentType': 'internal', + 'componentName': 'sync', + 'status': 'fail', + 'output': HathorManager.UnhealthinessReason.NO_RECENT_ACTIVITY, + 'time': ANY + }] + } + }) + + @inlineCallbacks + def test_get_no_connected_peer(self): + """Scenario where the node doesn't have any connected peer + """ + # This will make sure the node has recent activity + add_new_blocks(self.manager, 5) + + self.assertEqual(self.manager.has_recent_activity(), True) + + response = yield self.web.get('/health') + data = response.json_value() + + self.assertEqual(response.responseCode, 503) + self.assertEqual(data, { + 'status': 'fail', + 'description': ANY, + 'checks': { + 'sync': [{ + 'componentType': 'internal', + 'componentName': 'sync', + 'status': 'fail', + 'output': HathorManager.UnhealthinessReason.NO_SYNCED_PEER, + 'time': ANY + }] + } + }) + + @inlineCallbacks + def test_get_peer_out_of_sync(self): + """Scenario where the node is connected with a peer but not synced + """ + # This will make sure the node has recent activity + add_new_blocks(self.manager, 5) + + 
self.manager2 = self.create_peer('testnet') + self.conn1 = FakeConnection(self.manager, self.manager2) + self.conn1.run_one_step() # HELLO + self.conn1.run_one_step() # PEER-ID + self.conn1.run_one_step() # READY + + self.assertEqual(self.manager2.state, self.manager2.NodeState.READY) + + response = yield self.web.get('/health') + data = response.json_value() + + self.assertEqual(response.responseCode, 503) + self.assertEqual(data, { + 'status': 'fail', + 'description': ANY, + 'checks': { + 'sync': [{ + 'componentType': 'internal', + 'componentName': 'sync', + 'status': 'fail', + 'output': HathorManager.UnhealthinessReason.NO_SYNCED_PEER, + 'time': ANY + }] + } + }) + + @inlineCallbacks + def test_get_ready(self): + """Scenario where the node is ready + """ + self.manager2 = self.create_peer('testnet') + self.conn1 = FakeConnection(self.manager, self.manager2) + + # This will make sure the node has recent activity + add_new_blocks(self.manager, 5) + + # This will make sure the peers are synced + for _ in range(600): + self.conn1.run_one_step(debug=True) + self.clock.advance(0.1) + + response = yield self.web.get('/health') + data = response.json_value() + + self.assertEqual(response.responseCode, 200) + self.assertEqual(data, { + 'status': 'pass', + 'description': ANY, + 'checks': { + 'sync': [{ + 'componentType': 'internal', + 'componentName': 'sync', + 'status': 'pass', + 'output': 'Healthy', + 'time': ANY + }] + } + }) + + +class SyncV1StatusTest(unittest.SyncV1Params, BaseHealthcheckReadinessTest): + __test__ = True + + +class SyncV2StatusTest(unittest.SyncV2Params, BaseHealthcheckReadinessTest): + __test__ = True + + +# sync-bridge should behave like sync-v2 +class SyncBridgeStatusTest(unittest.SyncBridgeParams, SyncV2StatusTest): + pass diff --git a/tests/resources/p2p/test_healthcheck.py b/tests/resources/p2p/test_healthcheck.py deleted file mode 100644 index 90bf1e260..000000000 --- a/tests/resources/p2p/test_healthcheck.py +++ /dev/null @@ -1,95 +0,0 @@ 
-from twisted.internet.defer import inlineCallbacks - -from hathor.manager import HathorManager -from hathor.p2p.resources.healthcheck import HealthcheckReadinessResource -from hathor.simulator import FakeConnection -from tests import unittest -from tests.resources.base_resource import StubSite, _BaseResourceTest -from tests.utils import add_new_blocks - - -class BaseHealthcheckReadinessTest(_BaseResourceTest._ResourceTest): - __test__ = False - - def setUp(self): - super().setUp() - self.web = StubSite(HealthcheckReadinessResource(self.manager)) - - @inlineCallbacks - def test_get_no_recent_activity(self): - """Scenario where the node doesn't have a recent block - """ - response = yield self.web.get("p2p/readiness") - data = response.json_value() - - self.assertEqual(data['success'], False) - self.assertEqual(data['reason'], HathorManager.UnhealthinessReason.NO_RECENT_ACTIVITY) - - @inlineCallbacks - def test_get_no_connected_peer(self): - """Scenario where the node doesn't have any connected peer - """ - # This will make sure the node has recent activity - add_new_blocks(self.manager, 5) - - self.assertEqual(self.manager.has_recent_activity(), True) - - response = yield self.web.get("p2p/readiness") - data = response.json_value() - - self.assertEqual(data['success'], False) - self.assertEqual(data['reason'], HathorManager.UnhealthinessReason.NO_SYNCED_PEER) - - @inlineCallbacks - def test_get_peer_out_of_sync(self): - """Scenario where the node is connected with a peer but not synced - """ - # This will make sure the node has recent activity - add_new_blocks(self.manager, 5) - - self.manager2 = self.create_peer('testnet') - self.conn1 = FakeConnection(self.manager, self.manager2) - self.conn1.run_one_step() # HELLO - self.conn1.run_one_step() # PEER-ID - self.conn1.run_one_step() # READY - - self.assertEqual(self.manager2.state, self.manager2.NodeState.READY) - - response = yield self.web.get("p2p/readiness") - data = response.json_value() - - 
self.assertEqual(data['success'], False) - self.assertEqual(data['reason'], HathorManager.UnhealthinessReason.NO_SYNCED_PEER) - - @inlineCallbacks - def test_get_ready(self): - """Scenario where the node is ready - """ - self.manager2 = self.create_peer('testnet') - self.conn1 = FakeConnection(self.manager, self.manager2) - - # This will make sure the node has recent activity - add_new_blocks(self.manager, 5) - - # This will make sure the peers are synced - for _ in range(600): - self.conn1.run_one_step(debug=True) - self.clock.advance(0.1) - - response = yield self.web.get("p2p/readiness") - data = response.json_value() - - self.assertEqual(data['success'], True) - - -class SyncV1StatusTest(unittest.SyncV1Params, BaseHealthcheckReadinessTest): - __test__ = True - - -class SyncV2StatusTest(unittest.SyncV2Params, BaseHealthcheckReadinessTest): - __test__ = True - - -# sync-bridge should behave like sync-v2 -class SyncBridgeStatusTest(unittest.SyncBridgeParams, SyncV2StatusTest): - pass diff --git a/tests/resources/p2p/test_mining.py b/tests/resources/p2p/test_mining.py index 19e77f40f..b0d559f90 100644 --- a/tests/resources/p2p/test_mining.py +++ b/tests/resources/p2p/test_mining.py @@ -31,7 +31,7 @@ def test_post(self): block_bytes = base64.b64decode(block_bytes_str) block = Block.create_from_struct(block_bytes) block.weight = 4 - block.resolve() + self.manager.cpu_mining_service.resolve(block) block_bytes = bytes(block) block_bytes_str = base64.b64encode(block_bytes).decode('ascii') @@ -56,7 +56,7 @@ def test_post_invalid_data(self): block_bytes = base64.b64decode(block_bytes_str) block = Block.create_from_struct(block_bytes) block.weight = 4 - block.resolve() + self.manager.cpu_mining_service.resolve(block) block_bytes = bytes(block) block_bytes_str = base64.b64encode(block_bytes).decode('ascii') diff --git a/tests/resources/test_mining_info.py b/tests/resources/test_mining_info.py index e744cfca1..cde6fcc28 100644 --- a/tests/resources/test_mining_info.py +++ 
b/tests/resources/test_mining_info.py @@ -2,9 +2,9 @@ from hathor.conf import HathorSettings from hathor.p2p.resources import MiningInfoResource +from hathor.simulator.utils import add_new_blocks from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest -from tests.utils import add_new_blocks settings = HathorSettings() diff --git a/tests/resources/transaction/test_block_at_height.py b/tests/resources/transaction/test_block_at_height.py index 8abbb8cb0..9800b3816 100644 --- a/tests/resources/transaction/test_block_at_height.py +++ b/tests/resources/transaction/test_block_at_height.py @@ -1,9 +1,9 @@ from twisted.internet.defer import inlineCallbacks +from hathor.simulator.utils import add_new_blocks from hathor.transaction.resources import BlockAtHeightResource from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest -from tests.utils import add_new_blocks class BaseBlockAtHeightTest(_BaseResourceTest._ResourceTest): diff --git a/tests/resources/transaction/test_create_tx.py b/tests/resources/transaction/test_create_tx.py index 2217ec42b..e52b9c1f3 100644 --- a/tests/resources/transaction/test_create_tx.py +++ b/tests/resources/transaction/test_create_tx.py @@ -2,13 +2,14 @@ from twisted.internet.defer import inlineCallbacks -from hathor.daa import TestMode, _set_test_mode +from hathor.daa import TestMode +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction from hathor.transaction.resources import CreateTxResource from hathor.transaction.scripts import P2PKH, create_base_script from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest -from tests.utils import add_blocks_unlock_reward, add_new_blocks, add_new_tx +from tests.utils import add_blocks_unlock_reward, add_new_tx class BaseTransactionTest(_BaseResourceTest._ResourceTest): @@ -195,7 +196,7 @@ def test_spend_tx_by_script(self): @inlineCallbacks def 
test_tx_propagate(self): - _set_test_mode(TestMode.DISABLED) # disable test_mode so the weight is not 1 + self.manager.daa.TEST_MODE = TestMode.DISABLED # disable test_mode so the weight is not 1 src_tx = self.unspent_tx output_address = 'HNXsVtRUmwDCtpcCJUrH4QiHo9kUKx199A' resp = (yield self.web.post('create_tx', { @@ -228,12 +229,12 @@ def test_tx_propagate(self): input_data = P2PKH.create_input_data(public_key_bytes, signature_bytes) tx.inputs[0].data = input_data # XXX: tx.resolve is a bit CPU intensive, but not so much as to make this test disabled by default - tx.resolve(False) + self.manager.cpu_mining_service.resolve(tx, update_time=False) self.assertTrue(self.manager.propagate_tx(tx)) @inlineCallbacks def test_tx_propagate_multiple_inputs(self): - _set_test_mode(TestMode.DISABLED) # disable test_mode so the weight is not 1 + self.manager.daa.TEST_MODE = TestMode.DISABLED # disable test_mode so the weight is not 1 output_address = 'HNXsVtRUmwDCtpcCJUrH4QiHo9kUKx199A' resp = (yield self.web.post('create_tx', { 'inputs': [ @@ -275,7 +276,7 @@ def test_tx_propagate_multiple_inputs(self): tx.inputs[1].data = input_data tx.inputs[2].data = input_data # XXX: tx.resolve is a bit CPU intensive, but not so much as to make this test disabled by default - tx.resolve(False) + self.manager.cpu_mining_service.resolve(tx, update_time=False) self.assertTrue(self.manager.propagate_tx(tx)) @inlineCallbacks diff --git a/tests/resources/transaction/test_graphviz.py b/tests/resources/transaction/test_graphviz.py index e2b736927..4c3f0d498 100644 --- a/tests/resources/transaction/test_graphviz.py +++ b/tests/resources/transaction/test_graphviz.py @@ -1,10 +1,11 @@ from twisted.internet.defer import inlineCallbacks +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction from hathor.transaction.resources import GraphvizFullResource, GraphvizNeighboursResource from tests import unittest from tests.resources.base_resource import StubSite, 
TestDummyRequest, _BaseResourceTest -from tests.utils import add_blocks_unlock_reward, add_new_blocks, add_new_transactions +from tests.utils import add_blocks_unlock_reward, add_new_transactions class BaseGraphvizTest(_BaseResourceTest._ResourceTest): @@ -26,7 +27,7 @@ def setUp(self): self.tx2 = Transaction.create_from_struct(tx.get_struct()) self.tx2.parents = [tx.parents[1], tx.parents[0]] - self.tx2.resolve() + self.manager.cpu_mining_service.resolve(self.tx2) self.manager.propagate_tx(self.tx2) diff --git a/tests/resources/transaction/test_mempool.py b/tests/resources/transaction/test_mempool.py index faeb34de3..c98075b13 100644 --- a/tests/resources/transaction/test_mempool.py +++ b/tests/resources/transaction/test_mempool.py @@ -1,10 +1,11 @@ from twisted.internet.defer import inlineCallbacks from hathor.conf import HathorSettings +from hathor.simulator.utils import add_new_blocks from hathor.transaction.resources import MempoolResource from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest -from tests.utils import add_blocks_unlock_reward, add_new_blocks, add_new_transactions +from tests.utils import add_blocks_unlock_reward, add_new_transactions settings = HathorSettings() diff --git a/tests/resources/transaction/test_mining.py b/tests/resources/transaction/test_mining.py index 0981794bd..af97d8682 100644 --- a/tests/resources/transaction/test_mining.py +++ b/tests/resources/transaction/test_mining.py @@ -1,5 +1,6 @@ from twisted.internet.defer import inlineCallbacks +from hathor.mining.cpu_mining_service import CpuMiningService from hathor.transaction.resources import mining from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest @@ -39,7 +40,7 @@ def test_get_block_template_with_address(self): 'height': 1, 'min_height': 0, 'first_block': None, - 'feature_activation_bit_counts': [0, 0, 0, 0] + 'feature_activation_bit_counts': None }, 'tokens': [], 'data': '', @@ -72,7 
+73,7 @@ def test_get_block_template_without_address(self): 'height': 1, 'min_height': 0, 'first_block': None, - 'feature_activation_bit_counts': [0, 0, 0, 0] + 'feature_activation_bit_counts': None }, 'tokens': [], 'data': '', @@ -95,7 +96,7 @@ def test_get_block_template_and_submit_block(self): resp = yield self.get_block_template.get('', {b'address': b'HC7w4j7mPet49BBN5a2An3XUiPvK6C1TL7'}) data = resp.json_value() block = create_tx_from_dict(data) - block.resolve(False) + CpuMiningService().resolve(block, update_time=False) self.assertTrue(self.manager.propagate_tx(block)) diff --git a/tests/resources/transaction/test_pushtx.py b/tests/resources/transaction/test_pushtx.py index 314e7445f..10392c214 100644 --- a/tests/resources/transaction/test_pushtx.py +++ b/tests/resources/transaction/test_pushtx.py @@ -4,14 +4,16 @@ from hathor.conf import HathorSettings from hathor.crypto.util import decode_address +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TxInput from hathor.transaction.resources import PushTxResource from hathor.transaction.scripts import P2PKH, parse_address_script +from hathor.util import not_none from hathor.wallet.base_wallet import WalletInputInfo, WalletOutputInfo from hathor.wallet.resources import SendTokensResource from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest -from tests.utils import add_blocks_unlock_reward, add_new_blocks, add_tx_with_data_script, create_tokens +from tests.utils import add_blocks_unlock_reward, add_tx_with_data_script, create_tokens settings = HathorSettings() @@ -48,7 +50,7 @@ def get_tx(self, inputs: Optional[list[WalletInputInfo]] = None, max_ts_spent_tx = max(tx.get_spent_tx(txin).timestamp for txin in tx.inputs) tx.timestamp = max(max_ts_spent_tx + 1, int(self.manager.reactor.seconds())) tx.parents = self.manager.get_new_tx_parents(tx.timestamp) - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) return tx def 
push_tx(self, data=None): @@ -92,7 +94,7 @@ def test_push_tx(self) -> Generator: # modify tx so it will be a double spending, then rejected tx.weight += 0.1 - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) tx_hex = tx.get_struct().hex() response_success = yield self.push_tx({'hex_tx': tx_hex}) @@ -101,7 +103,7 @@ def test_push_tx(self) -> Generator: # invalid transaction, without forcing tx.timestamp = 5 - tx.inputs = [TxInput(blocks[1].hash, 0, b'')] + tx.inputs = [TxInput(not_none(blocks[1].hash), 0, b'')] script_type_out = parse_address_script(blocks[1].outputs[0].script) assert script_type_out is not None private_key = self.manager.wallet.get_private_key(script_type_out.address) @@ -183,7 +185,7 @@ def test_script_too_big(self) -> Generator: # Invalid tx (output script is too long) tx.outputs[0].script = b'*' * (settings.PUSHTX_MAX_OUTPUT_SCRIPT_SIZE + 1) - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) tx_hex = tx.get_struct().hex() response = yield self.push_tx({'hex_tx': tx_hex}) data = response.json_value() @@ -199,7 +201,7 @@ def test_non_standard_script(self) -> Generator: # Invalid tx (output script is too long) tx.outputs[0].script = b'*' * 5 - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) tx_hex = tx.get_struct().hex() response = yield self.push_tx({'hex_tx': tx_hex}) data = response.json_value() diff --git a/tests/resources/transaction/test_transaction_confirmation.py b/tests/resources/transaction/test_transaction_confirmation.py index 4902a10ad..a04eeb641 100644 --- a/tests/resources/transaction/test_transaction_confirmation.py +++ b/tests/resources/transaction/test_transaction_confirmation.py @@ -1,9 +1,10 @@ from twisted.internet.defer import inlineCallbacks +from hathor.simulator.utils import add_new_blocks from hathor.transaction.resources import TransactionAccWeightResource from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest -from tests.utils import 
add_blocks_unlock_reward, add_new_blocks, add_new_transactions +from tests.utils import add_blocks_unlock_reward, add_new_transactions class BaseTransactionTest(_BaseResourceTest._ResourceTest): diff --git a/tests/resources/transaction/test_tx.py b/tests/resources/transaction/test_tx.py index c6a2e72d9..4d4344c0e 100644 --- a/tests/resources/transaction/test_tx.py +++ b/tests/resources/transaction/test_tx.py @@ -1,12 +1,13 @@ from twisted.internet.defer import inlineCallbacks +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction from hathor.transaction.resources import TransactionResource from hathor.transaction.token_creation_tx import TokenCreationTransaction from hathor.transaction.validation_state import ValidationState from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest -from tests.utils import add_blocks_unlock_reward, add_new_blocks, add_new_transactions +from tests.utils import add_blocks_unlock_reward, add_new_transactions class BaseTransactionTest(_BaseResourceTest._ResourceTest): @@ -52,7 +53,7 @@ def test_get_one(self): tx2 = Transaction.create_from_struct(tx.get_struct()) tx2.parents = [tx.parents[1], tx.parents[0]] - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) self.manager.propagate_tx(tx2) diff --git a/tests/resources/transaction/test_utxo_search.py b/tests/resources/transaction/test_utxo_search.py index 57e4aea28..ecd73c918 100644 --- a/tests/resources/transaction/test_utxo_search.py +++ b/tests/resources/transaction/test_utxo_search.py @@ -2,10 +2,11 @@ from hathor.conf import HathorSettings from hathor.crypto.util import decode_address +from hathor.simulator.utils import add_new_blocks from hathor.transaction.resources import UtxoSearchResource from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest -from tests.utils import add_blocks_unlock_reward, add_new_blocks +from tests.utils import 
add_blocks_unlock_reward settings = HathorSettings() diff --git a/tests/resources/wallet/test_balance.py b/tests/resources/wallet/test_balance.py index 7732caa21..42532c7e7 100644 --- a/tests/resources/wallet/test_balance.py +++ b/tests/resources/wallet/test_balance.py @@ -2,6 +2,7 @@ from twisted.internet.defer import inlineCallbacks +from hathor.mining.cpu_mining_service import CpuMiningService from hathor.p2p.resources import MiningResource from hathor.wallet.resources import BalanceResource from tests import unittest @@ -27,7 +28,10 @@ def test_get(self): # Mining new block response_mining = yield self.web_mining.get("mining") data_mining = response_mining.json_value() - block_bytes = resolve_block_bytes(block_bytes=data_mining['block_bytes']) + block_bytes = resolve_block_bytes( + block_bytes=data_mining['block_bytes'], + cpu_mining_service=CpuMiningService() + ) yield self.web_mining.post("mining", {'block_bytes': base64.b64encode(block_bytes).decode('utf-8')}) # Get new balance after block diff --git a/tests/resources/wallet/test_history.py b/tests/resources/wallet/test_history.py index 83bdb26dc..7b7e398c7 100644 --- a/tests/resources/wallet/test_history.py +++ b/tests/resources/wallet/test_history.py @@ -2,6 +2,7 @@ from twisted.internet.defer import inlineCallbacks +from hathor.mining.cpu_mining_service import CpuMiningService from hathor.p2p.resources import MiningResource from hathor.wallet.resources import HistoryResource from tests import unittest @@ -22,7 +23,10 @@ def test_get(self): # Mining new block response_mining = yield self.web_mining.get("mining") data_mining = response_mining.json_value() - block_bytes = resolve_block_bytes(block_bytes=data_mining['block_bytes']) + block_bytes = resolve_block_bytes( + block_bytes=data_mining['block_bytes'], + cpu_mining_service=CpuMiningService() + ) yield self.web_mining.post("mining", {'block_bytes': base64.b64encode(block_bytes).decode('utf-8')}) # Getting wallet history diff --git 
a/tests/resources/wallet/test_nano_contract.py b/tests/resources/wallet/test_nano_contract.py index c42fee995..f5d6e345a 100644 --- a/tests/resources/wallet/test_nano_contract.py +++ b/tests/resources/wallet/test_nano_contract.py @@ -1,5 +1,6 @@ from twisted.internet.defer import inlineCallbacks +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction from hathor.transaction.resources import DecodeTxResource, PushTxResource from hathor.util import json_loadb @@ -11,7 +12,7 @@ ) from tests import unittest from tests.resources.base_resource import StubSite, TestDummyRequest, _BaseResourceTest -from tests.utils import add_blocks_unlock_reward, add_new_blocks +from tests.utils import add_blocks_unlock_reward class BaseNanoContractsTest(_BaseResourceTest._ResourceTest): diff --git a/tests/resources/wallet/test_search_address.py b/tests/resources/wallet/test_search_address.py index 8ec09a251..01892526d 100644 --- a/tests/resources/wallet/test_search_address.py +++ b/tests/resources/wallet/test_search_address.py @@ -2,11 +2,12 @@ from hathor.conf import HathorSettings from hathor.crypto.util import decode_address +from hathor.simulator.utils import add_new_blocks from hathor.transaction.scripts import parse_address_script from hathor.wallet.resources.thin_wallet import AddressBalanceResource, AddressSearchResource from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest -from tests.utils import add_blocks_unlock_reward, add_new_blocks, create_tokens +from tests.utils import add_blocks_unlock_reward, create_tokens settings = HathorSettings() diff --git a/tests/resources/wallet/test_send_tokens.py b/tests/resources/wallet/test_send_tokens.py index 0e5cbc533..3c98bf3df 100644 --- a/tests/resources/wallet/test_send_tokens.py +++ b/tests/resources/wallet/test_send_tokens.py @@ -2,12 +2,14 @@ from twisted.internet.defer import inlineCallbacks -from hathor.daa import TestMode, _set_test_mode +from 
hathor.daa import TestMode +from hathor.mining.cpu_mining_service import CpuMiningService from hathor.p2p.resources import MiningResource +from hathor.simulator.utils import add_new_blocks from hathor.wallet.resources import BalanceResource, HistoryResource, SendTokensResource from tests import unittest from tests.resources.base_resource import StubSite, TestDummyRequest, _BaseResourceTest -from tests.utils import add_blocks_unlock_reward, add_new_blocks, resolve_block_bytes +from tests.utils import add_blocks_unlock_reward, resolve_block_bytes class BaseSendTokensTest(_BaseResourceTest._ResourceTest): @@ -25,7 +27,10 @@ def test_post(self): # Mining new block response_mining = yield self.web_mining.get("mining") data_mining = response_mining.json_value() - block_bytes = resolve_block_bytes(block_bytes=data_mining['block_bytes']) + block_bytes = resolve_block_bytes( + block_bytes=data_mining['block_bytes'], + cpu_mining_service=CpuMiningService() + ) yield self.web_mining.post("mining", {'block_bytes': base64.b64encode(block_bytes).decode('utf-8')}) add_blocks_unlock_reward(self.manager) self.reactor.advance(10) @@ -168,7 +173,7 @@ def test_post(self): @inlineCallbacks def test_tx_weight(self): - _set_test_mode(TestMode.DISABLED) + self.manager.daa.TEST_MODE = TestMode.DISABLED add_new_blocks(self.manager, 3, advance_clock=1) add_blocks_unlock_reward(self.manager) self.reactor.advance(3) diff --git a/tests/resources/wallet/test_thin_wallet.py b/tests/resources/wallet/test_thin_wallet.py index e9d0d4b31..f14a7733c 100644 --- a/tests/resources/wallet/test_thin_wallet.py +++ b/tests/resources/wallet/test_thin_wallet.py @@ -4,7 +4,7 @@ from hathor.conf import HathorSettings from hathor.crypto.util import decode_address -from hathor.daa import minimum_tx_weight +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TxInput, TxOutput from hathor.transaction.scripts import P2PKH, create_output_script, parse_address_script from 
hathor.wallet.resources.thin_wallet import ( @@ -15,7 +15,7 @@ ) from tests import unittest from tests.resources.base_resource import StubSite, TestDummyRequest, _BaseResourceTest -from tests.utils import add_blocks_unlock_reward, add_new_blocks, add_new_tx, create_tokens +from tests.utils import add_blocks_unlock_reward, add_new_tx, create_tokens settings = HathorSettings() @@ -85,7 +85,7 @@ def test_post(self): i.data = P2PKH.create_input_data(public_key_bytes, signature_bytes) tx2.inputs = [i] tx2.timestamp = int(self.clock.seconds()) - tx2.weight = minimum_tx_weight(tx2) + tx2.weight = self.manager.daa.minimum_tx_weight(tx2) response_wrong_amount = yield self.web.post('thin_wallet/send_tokens', {'tx_hex': tx2.get_struct().hex()}) data_wrong_amount = response_wrong_amount.json_value() @@ -100,7 +100,7 @@ def test_post(self): i.data = P2PKH.create_input_data(public_key_bytes, signature_bytes) tx3.inputs = [i] tx3.timestamp = int(self.clock.seconds()) - tx3.weight = minimum_tx_weight(tx3) + tx3.weight = self.manager.daa.minimum_tx_weight(tx3) # Then send tokens response = yield self.web.post('thin_wallet/send_tokens', {'tx_hex': tx3.get_struct().hex()}) @@ -423,9 +423,9 @@ def test_token_history(self): i.data = P2PKH.create_input_data(public_key_bytes, signature_bytes) tx2.inputs = [i] tx2.timestamp = int(self.clock.seconds()) - tx2.weight = minimum_tx_weight(tx2) + tx2.weight = self.manager.daa.minimum_tx_weight(tx2) tx2.parents = self.manager.get_new_tx_parents() - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) self.manager.propagate_tx(tx2) # Now we have 2 txs with this token diff --git a/tests/simulation/test_simulator.py b/tests/simulation/test_simulator.py index cce6c795b..aac7edd66 100644 --- a/tests/simulation/test_simulator.py +++ b/tests/simulation/test_simulator.py @@ -2,6 +2,7 @@ from hathor.simulator import FakeConnection from hathor.simulator.trigger import All as AllTriggers, StopWhenSynced +from hathor.verification.vertex_verifier 
import VertexVerifier from tests import unittest from tests.simulation.base import SimulatorTestCase @@ -12,7 +13,7 @@ def test_verify_pow(self): # just get one of the genesis, we don't really need to create any transaction tx = next(iter(manager1.tx_storage.get_all_genesis())) # optional argument must be valid, it just has to not raise any exception, there's no assert for that - tx.verify_pow(0.) + VertexVerifier(settings=self._settings, daa=manager1.daa).verify_pow(tx, override_weight=0.) def test_one_node(self): manager1 = self.create_peer() @@ -87,7 +88,9 @@ def test_many_miners_since_beginning(self): for miner in miners: miner.stop() - self.assertTrue(self.simulator.run(3600, trigger=AllTriggers(stop_triggers))) + # TODO Add self.assertTrue(...) when the trigger is fixed. + # For further information, see https://github.com/HathorNetwork/hathor-core/pull/815. + self.simulator.run(3600, trigger=AllTriggers(stop_triggers)) for node in nodes[1:]: self.assertTipsEqual(nodes[0], node) diff --git a/tests/simulation/test_trigger.py b/tests/simulation/test_trigger.py index cb7ddb1ec..b91e4e293 100644 --- a/tests/simulation/test_trigger.py +++ b/tests/simulation/test_trigger.py @@ -1,5 +1,8 @@ -from hathor.simulator import Simulator -from hathor.simulator.trigger import StopAfterMinimumBalance, StopAfterNMinedBlocks +import re + +from hathor.p2p.messages import ProtocolMessages +from hathor.simulator import FakeConnection, Simulator +from hathor.simulator.trigger import StopAfterMinimumBalance, StopAfterNMinedBlocks, StopWhenSendLineMatch from tests import unittest @@ -58,3 +61,13 @@ def test_stop_after_minimum_balance(self): self.assertLess(wallet.balance[token_uid].available, minimum_balance) self.assertTrue(self.simulator.run(3600, trigger=trigger)) self.assertGreaterEqual(wallet.balance[token_uid].available, minimum_balance) + + def test_stop_after_sendline(self): + manager2 = self.simulator.create_peer() + conn12 = FakeConnection(self.manager1, manager2, 
latency=0.05) + self.simulator.add_connection(conn12) + + expected_prefix = f'^{ProtocolMessages.PEER_ID.value} '.encode('ascii') + regex = re.compile(expected_prefix) + trigger = StopWhenSendLineMatch(conn12._proto1, regex) + self.assertTrue(self.simulator.run(120, trigger=trigger)) diff --git a/tests/sysctl/test_p2p.py b/tests/sysctl/test_p2p.py index bd7216e0b..726e0d78a 100644 --- a/tests/sysctl/test_p2p.py +++ b/tests/sysctl/test_p2p.py @@ -123,15 +123,52 @@ def test_always_enable_sync(self): self.assertEqual(connections.always_enable_sync, set(content)) self.assertEqual(set(sysctl.get('always_enable_sync')), set(content)) + def test_available_sync_versions(self): + from hathor.p2p.sync_version import SyncVersion + + manager = self.create_peer() + connections = manager.connections + sysctl = ConnectionsManagerSysctl(connections) + + self.assertEqual(sysctl.get('available_sync_versions'), ['v1', 'v2']) + + del connections._sync_factories[SyncVersion.V2] + self.assertEqual(sysctl.get('available_sync_versions'), ['v1']) + + def _default_enabled_sync_versions(self) -> list[str]: + raise NotImplementedError + + def test_enabled_sync_versions(self): + manager = self.create_peer() + connections = manager.connections + sysctl = ConnectionsManagerSysctl(connections) + + self.assertEqual(sysctl.get('enabled_sync_versions'), self._default_enabled_sync_versions()) + sysctl.set('enabled_sync_versions', ['v1', 'v2']) + self.assertEqual(sysctl.get('enabled_sync_versions'), ['v1', 'v2']) + sysctl.set('enabled_sync_versions', ['v2']) + self.assertEqual(sysctl.get('enabled_sync_versions'), ['v2']) + sysctl.set('enabled_sync_versions', ['v1']) + self.assertEqual(sysctl.get('enabled_sync_versions'), ['v1']) + class SyncV1RandomSimulatorTestCase(unittest.SyncV1Params, BaseRandomSimulatorTestCase): __test__ = True + def _default_enabled_sync_versions(self) -> list[str]: + return ['v1'] + class SyncV2RandomSimulatorTestCase(unittest.SyncV2Params, BaseRandomSimulatorTestCase): 
__test__ = True + def _default_enabled_sync_versions(self) -> list[str]: + return ['v2'] + # sync-bridge should behave like sync-v2 class SyncBridgeRandomSimulatorTestCase(unittest.SyncBridgeParams, SyncV2RandomSimulatorTestCase): __test__ = True + + def _default_enabled_sync_versions(self) -> list[str]: + return ['v1', 'v2'] diff --git a/tests/sysctl/test_runner.py b/tests/sysctl/test_runner.py new file mode 100644 index 000000000..e4d1c6806 --- /dev/null +++ b/tests/sysctl/test_runner.py @@ -0,0 +1,31 @@ + +import pytest + +from hathor.sysctl import Sysctl +from hathor.sysctl.runner import SysctlRunner + + +@pytest.mark.parametrize( + 'args', + [ + 'string', + "\"", + 1, + True, + False, + 'a,b', + (1, 2, 3), + (1, 'string', True), + [1, 2, 3], + (1, [1, 2, 3]), + (1, ["a,a,a", "b", "c"]), + ] +) +def test_deserialize(args): + root = Sysctl() + runner = SysctlRunner(root) + + args_serialized = runner.serialize(args) + args_deserialized = runner.deserialize(args_serialized) + + assert args == args_deserialized diff --git a/tests/tx/test_accumulated_weight.py b/tests/tx/test_accumulated_weight.py index 30d170b84..8f19e00ff 100644 --- a/tests/tx/test_accumulated_weight.py +++ b/tests/tx/test_accumulated_weight.py @@ -1,7 +1,8 @@ +from hathor.simulator.utils import add_new_blocks from hathor.transaction import sum_weights from hathor.transaction.storage import TransactionMemoryStorage from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_blocks, add_new_transactions +from tests.utils import add_blocks_unlock_reward, add_new_transactions class BaseAccumulatedWeightTestCase(unittest.TestCase): diff --git a/tests/tx/test_block.py b/tests/tx/test_block.py index a7b362dfe..a363cfb78 100644 --- a/tests/tx/test_block.py +++ b/tests/tx/test_block.py @@ -16,10 +16,14 @@ import pytest -from hathor.conf import HathorSettings from hathor.conf.get_settings import get_settings +from hathor.conf.settings import HathorSettings +from 
hathor.feature_activation.feature import Feature +from hathor.feature_activation.feature_service import BlockIsMissingSignal, BlockIsSignaling, FeatureService from hathor.transaction import Block, TransactionMetadata +from hathor.transaction.exceptions import BlockMustSignalError from hathor.transaction.storage import TransactionMemoryStorage, TransactionStorage +from hathor.verification.block_verifier import BlockVerifier def test_calculate_feature_activation_bit_counts_genesis(): @@ -27,13 +31,14 @@ def test_calculate_feature_activation_bit_counts_genesis(): storage = TransactionMemoryStorage() genesis_block = storage.get_transaction(settings.GENESIS_BLOCK_HASH) assert isinstance(genesis_block, Block) - result = genesis_block.calculate_feature_activation_bit_counts() + result = genesis_block.get_feature_activation_bit_counts() assert result == [0, 0, 0, 0] @pytest.fixture def block_mocks() -> list[Block]: + settings = get_settings() blocks: list[Block] = [] feature_activation_bits = [ 0b0000, # 0: boundary block @@ -51,7 +56,6 @@ def block_mocks() -> list[Block]: ] for i, bits in enumerate(feature_activation_bits): - settings = HathorSettings() genesis_hash = settings.GENESIS_BLOCK_HASH block_hash = genesis_hash if i == 0 else b'some_hash' @@ -88,7 +92,7 @@ def test_calculate_feature_activation_bit_counts( expected_counts: list[int] ) -> None: block = block_mocks[block_height] - result = block.calculate_feature_activation_bit_counts() + result = block.get_feature_activation_bit_counts() assert result == expected_counts @@ -132,3 +136,45 @@ def test_get_feature_activation_bit_value() -> None: assert block.get_feature_activation_bit_value(1) == 0 assert block.get_feature_activation_bit_value(2) == 1 assert block.get_feature_activation_bit_value(3) == 0 + + +@pytest.mark.parametrize( + 'is_signaling_mandatory_features', + [BlockIsSignaling(), BlockIsMissingSignal(feature=Feature.NOP_FEATURE_1)] +) +def 
test_verify_must_signal_when_feature_activation_is_disabled(is_signaling_mandatory_features: BlockIsSignaling | BlockIsMissingSignal) -> None: + settings = Mock(spec_set=HathorSettings) + settings.FEATURE_ACTIVATION.enable_usage = False + feature_service = Mock(spec_set=FeatureService) + feature_service.is_signaling_mandatory_features = Mock(return_value=is_signaling_mandatory_features) + verifier = BlockVerifier(settings=settings, feature_service=feature_service, daa=Mock()) + block = Block() + + verifier.verify_mandatory_signaling(block) + + +def test_verify_must_signal() -> None: + settings = Mock(spec_set=HathorSettings) + settings.FEATURE_ACTIVATION.enable_usage = True + feature_service = Mock(spec_set=FeatureService) + feature_service.is_signaling_mandatory_features = Mock( + return_value=BlockIsMissingSignal(feature=Feature.NOP_FEATURE_1) + ) + verifier = BlockVerifier(settings=settings, feature_service=feature_service, daa=Mock()) + block = Block() + + with pytest.raises(BlockMustSignalError) as e: + verifier.verify_mandatory_signaling(block) + + assert str(e.value) == "Block must signal support for feature 'NOP_FEATURE_1' during MUST_SIGNAL phase." 
+ + +def test_verify_must_not_signal() -> None: + settings = Mock(spec_set=HathorSettings) + settings.FEATURE_ACTIVATION.enable_usage = True + feature_service = Mock(spec_set=FeatureService) + feature_service.is_signaling_mandatory_features = Mock(return_value=BlockIsSignaling()) + verifier = BlockVerifier(settings=settings, feature_service=feature_service, daa=Mock()) + block = Block() + + verifier.verify_mandatory_signaling(block) diff --git a/tests/tx/test_blockchain.py b/tests/tx/test_blockchain.py index d808975e9..02ec4cd07 100644 --- a/tests/tx/test_blockchain.py +++ b/tests/tx/test_blockchain.py @@ -1,11 +1,12 @@ from itertools import chain from hathor.conf import HathorSettings -from hathor.daa import TestMode, _set_test_mode, get_weight_decay_amount +from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode +from hathor.simulator.utils import add_new_blocks from hathor.transaction import sum_weights from hathor.transaction.storage import TransactionMemoryStorage from tests import unittest -from tests.utils import add_new_blocks, add_new_transactions +from tests.utils import add_new_transactions settings = HathorSettings() @@ -30,6 +31,7 @@ def setUp(self): self.genesis = self.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] + self.daa = DifficultyAdjustmentAlgorithm(settings=settings) def test_single_chain(self): """ All new blocks belong to case (i). 
@@ -114,7 +116,7 @@ def test_single_fork_not_best(self): # Change the order of the transactions to change the hash fork_block1 = manager.generate_mining_block() fork_block1.parents = [fork_block1.parents[0]] + fork_block1.parents[:0:-1] - fork_block1.resolve() + manager.cpu_mining_service.resolve(fork_block1) manager.verification_service.verify(fork_block1) # Mine 8 blocks in a row @@ -166,7 +168,7 @@ def test_single_fork_not_best(self): # Propagate a block connected to the voided chain # This block belongs to case (iv). fork_block3 = manager.generate_mining_block(parent_block_hash=fork_block1.hash) - fork_block3.resolve() + manager.cpu_mining_service.resolve(fork_block3) manager.verification_service.verify(fork_block3) self.assertTrue(manager.propagate_tx(fork_block3)) fork_meta3 = fork_block3.get_metadata() @@ -236,7 +238,7 @@ def test_multiple_forks(self): # Propagate a block connected to the voided chain, case (iii). fork_block2 = manager.generate_mining_block(parent_block_hash=sidechain[-1].hash) - fork_block2.resolve() + manager.cpu_mining_service.resolve(fork_block2) manager.verification_service.verify(fork_block2) self.assertTrue(manager.propagate_tx(fork_block2)) sidechain.append(fork_block2) @@ -284,7 +286,7 @@ def test_multiple_forks(self): # Propagate a block connected to the side chain, case (v). fork_block3 = manager.generate_mining_block(parent_block_hash=fork_block2.hash) - fork_block3.resolve() + manager.cpu_mining_service.resolve(fork_block3) manager.verification_service.verify(fork_block3) self.assertTrue(manager.propagate_tx(fork_block3)) sidechain.append(fork_block3) @@ -310,7 +312,7 @@ def test_multiple_forks(self): # Another side chain has direcly exceeded the best score. 
fork_block4 = manager.generate_mining_block(parent_block_hash=sidechain3[-1].hash) fork_block4.weight = 10 - fork_block4.resolve() + manager.cpu_mining_service.resolve(fork_block4) manager.verification_service.verify(fork_block4) self.assertTrue(manager.propagate_tx(fork_block4)) sidechain3.append(fork_block4) @@ -389,8 +391,8 @@ def test_block_rewards(self): def test_daa_sanity(self): # sanity test the DAA - _set_test_mode(TestMode.DISABLED) manager = self.create_peer('testnet', tx_storage=self.tx_storage) + manager.daa.TEST_MODE = TestMode.DISABLED N = settings.BLOCK_DIFFICULTY_N_BLOCKS T = settings.AVG_TIME_BETWEEN_BLOCKS manager.avg_time_between_blocks = T @@ -416,46 +418,34 @@ def test_daa_sanity(self): self.assertLess(new_weight, base_weight) def test_daa_weight_decay_amount(self): - _set_test_mode(TestMode.DISABLED) + self.daa.TEST_MODE = TestMode.DISABLED amount = settings.WEIGHT_DECAY_AMOUNT for distance in range(0, settings.WEIGHT_DECAY_ACTIVATE_DISTANCE, 10): - self.assertEqual(get_weight_decay_amount(distance), 0) + self.assertEqual(self.daa.get_weight_decay_amount(distance), 0) distance = settings.WEIGHT_DECAY_ACTIVATE_DISTANCE - 1 - self.assertAlmostEqual(get_weight_decay_amount(distance), 0) + self.assertAlmostEqual(self.daa.get_weight_decay_amount(distance), 0) distance = settings.WEIGHT_DECAY_ACTIVATE_DISTANCE for k in range(1, 11): for _ in range(settings.WEIGHT_DECAY_WINDOW_SIZE): - self.assertAlmostEqual(get_weight_decay_amount(distance), k * amount) + self.assertAlmostEqual(self.daa.get_weight_decay_amount(distance), k * amount) distance += 1 - self.assertAlmostEqual(get_weight_decay_amount(distance), 11 * amount) + self.assertAlmostEqual(self.daa.get_weight_decay_amount(distance), 11 * amount) def test_daa_weight_decay_blocks(self): - from hathor import daa - orig_avg_time_between_blocks = daa.AVG_TIME_BETWEEN_BLOCKS - orig_min_block_weight = daa.MIN_BLOCK_WEIGHT - - try: - self._test_daa_weight_decay_blocks() - finally: - 
daa.AVG_TIME_BETWEEN_BLOCKS = orig_avg_time_between_blocks - daa.MIN_BLOCK_WEIGHT = orig_min_block_weight - - def _test_daa_weight_decay_blocks(self): - _set_test_mode(TestMode.DISABLED) manager = self.create_peer('testnet', tx_storage=self.tx_storage) + manager.daa.TEST_MODE = TestMode.DISABLED amount = settings.WEIGHT_DECAY_AMOUNT - from hathor import daa - daa.AVG_TIME_BETWEEN_BLOCKS = settings.AVG_TIME_BETWEEN_BLOCKS - daa.MIN_BLOCK_WEIGHT = 2 + 2 * settings.WEIGHT_DECAY_AMOUNT + manager.daa.AVG_TIME_BETWEEN_BLOCKS = settings.AVG_TIME_BETWEEN_BLOCKS + manager.daa.MIN_BLOCK_WEIGHT = 2 + 2 * settings.WEIGHT_DECAY_AMOUNT add_new_blocks(manager, 2 * settings.BLOCK_DIFFICULTY_N_BLOCKS, advance_clock=settings.AVG_TIME_BETWEEN_BLOCKS) - daa.MIN_BLOCK_WEIGHT = 1 + manager.daa.MIN_BLOCK_WEIGHT = 1 base_weight = manager.generate_mining_block().weight - self.assertGreater(base_weight, daa.MIN_BLOCK_WEIGHT) + self.assertGreater(base_weight, manager.daa.MIN_BLOCK_WEIGHT) add_new_blocks(manager, 20, advance_clock=settings.AVG_TIME_BETWEEN_BLOCKS) @@ -482,7 +472,7 @@ def _test_daa_weight_decay_blocks(self): manager.reactor.advance(1) weight = manager.generate_mining_block().weight - self.assertAlmostEqual(weight, daa.MIN_BLOCK_WEIGHT) + self.assertAlmostEqual(weight, manager.daa.MIN_BLOCK_WEIGHT) class SyncV1BlockchainTestCase(unittest.SyncV1Params, BaseBlockchainTestCase): diff --git a/tests/tx/test_cache_storage.py b/tests/tx/test_cache_storage.py index a7832bd1c..d9aac999c 100644 --- a/tests/tx/test_cache_storage.py +++ b/tests/tx/test_cache_storage.py @@ -1,8 +1,9 @@ -from hathor.daa import TestMode, _set_test_mode +from hathor.daa import TestMode +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TransactionMetadata from hathor.transaction.storage import TransactionCacheStorage from tests import unittest -from tests.utils import add_new_blocks, add_new_transactions +from tests.utils import add_new_transactions CACHE_SIZE = 5 @@ 
-144,7 +145,7 @@ def test_flush_thread(self): self.cache_storage._flush_to_storage(self.cache_storage.dirty_txs.copy()) def test_topological_sort_dfs(self): - _set_test_mode(TestMode.TEST_ALL_WEIGHT) + self.manager.daa.TEST_MODE = TestMode.TEST_ALL_WEIGHT add_new_blocks(self.manager, 11, advance_clock=1) tx = add_new_transactions(self.manager, 1, advance_clock=1)[0] diff --git a/tests/tx/test_genesis.py b/tests/tx/test_genesis.py index a30759193..885395fa7 100644 --- a/tests/tx/test_genesis.py +++ b/tests/tx/test_genesis.py @@ -1,6 +1,9 @@ from hathor.conf import HathorSettings -from hathor.daa import TestMode, _set_test_mode, calculate_block_difficulty, minimum_tx_weight +from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode from hathor.transaction.storage import TransactionMemoryStorage +from hathor.verification.verification_service import VerificationService +from hathor.verification.vertex_verifier import VertexVerifier +from hathor.verification.vertex_verifiers import VertexVerifiers from tests import unittest settings = HathorSettings() @@ -26,18 +29,22 @@ def get_genesis_output(): class GenesisTest(unittest.TestCase): def setUp(self): super().setUp() + self._daa = DifficultyAdjustmentAlgorithm(settings=self._settings) + verifiers = VertexVerifiers.create_defaults(settings=self._settings, daa=self._daa) + self._verification_service = VerificationService(verifiers=verifiers) self.storage = TransactionMemoryStorage() def test_pow(self): + verifier = VertexVerifier(settings=self._settings, daa=self._daa) genesis = self.storage.get_all_genesis() for g in genesis: self.assertEqual(g.calculate_hash(), g.hash) - self.assertIsNone(g.verify_pow()) + self.assertIsNone(verifier.verify_pow(g)) def test_verify(self): genesis = self.storage.get_all_genesis() for g in genesis: - g.verify_without_storage() + self._verification_service.verify_without_storage(g) def test_output(self): # Test if block output is valid @@ -64,10 +71,10 @@ def test_genesis_weight(self): # 
Validate the block and tx weight # in test mode weight is always 1 - _set_test_mode(TestMode.TEST_ALL_WEIGHT) - self.assertEqual(calculate_block_difficulty(genesis_block), 1) - self.assertEqual(minimum_tx_weight(genesis_tx), 1) + self._daa.TEST_MODE = TestMode.TEST_ALL_WEIGHT + self.assertEqual(self._daa.calculate_block_difficulty(genesis_block), 1) + self.assertEqual(self._daa.minimum_tx_weight(genesis_tx), 1) - _set_test_mode(TestMode.DISABLED) - self.assertEqual(calculate_block_difficulty(genesis_block), genesis_block.weight) - self.assertEqual(minimum_tx_weight(genesis_tx), genesis_tx.weight) + self._daa.TEST_MODE = TestMode.DISABLED + self.assertEqual(self._daa.calculate_block_difficulty(genesis_block), genesis_block.weight) + self.assertEqual(self._daa.minimum_tx_weight(genesis_tx), genesis_tx.weight) diff --git a/tests/tx/test_indexes.py b/tests/tx/test_indexes.py index 5227b525d..b26652a9a 100644 --- a/tests/tx/test_indexes.py +++ b/tests/tx/test_indexes.py @@ -3,21 +3,13 @@ from hathor.conf import HathorSettings from hathor.crypto.util import decode_address from hathor.graphviz import GraphvizVisualizer +from hathor.simulator.utils import add_new_block, add_new_blocks from hathor.storage.rocksdb_storage import RocksDBStorage from hathor.transaction import Transaction from hathor.util import iwindows from hathor.wallet import Wallet from tests import unittest -from tests.utils import ( - HAS_ROCKSDB, - add_blocks_unlock_reward, - add_custom_tx, - add_new_block, - add_new_blocks, - add_new_tx, - gen_new_tx, - get_genesis_key, -) +from tests.utils import HAS_ROCKSDB, add_blocks_unlock_reward, add_custom_tx, add_new_tx, get_genesis_key settings = HathorSettings() @@ -40,7 +32,7 @@ def test_tx_tips_with_conflict(self): tx1.weight = 2.0 tx1.parents = self.manager.get_new_tx_parents() tx1.timestamp = int(self.clock.seconds()) - tx1.resolve() + self.manager.cpu_mining_service.resolve(tx1) self.assertTrue(self.manager.propagate_tx(tx1, False)) if 
self.manager.tx_storage.indexes.mempool_tips is not None: self.assertEqual( @@ -55,7 +47,7 @@ def test_tx_tips_with_conflict(self): tx2.parents = [tx1.hash] + self.manager.get_new_tx_parents()[1:] self.assertIn(tx1.hash, tx2.parents) tx2.timestamp = int(self.clock.seconds()) + 1 - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) self.assertTrue(self.manager.propagate_tx(tx2, False)) if self.manager.tx_storage.indexes.mempool_tips is not None: self.assertEqual( @@ -66,7 +58,7 @@ def test_tx_tips_with_conflict(self): tx3 = Transaction.create_from_struct(tx2.get_struct()) tx3.timestamp = tx2.timestamp + 1 self.assertIn(tx1.hash, tx3.parents) - tx3.resolve() + self.manager.cpu_mining_service.resolve(tx3) self.assertNotEqual(tx2.hash, tx3.hash) self.assertTrue(self.manager.propagate_tx(tx3, False)) self.assertIn(tx3.hash, tx2.get_metadata().conflict_with) @@ -97,7 +89,7 @@ def test_tx_tips_voided(self): tx1.weight = 2.0 tx1.parents = self.manager.get_new_tx_parents() tx1.timestamp = int(self.clock.seconds()) - tx1.resolve() + self.manager.cpu_mining_service.resolve(tx1) self.assertTrue(self.manager.propagate_tx(tx1, False)) if self.manager.tx_storage.indexes.mempool_tips is not None: self.assertEqual( @@ -110,7 +102,7 @@ def test_tx_tips_voided(self): tx2.parents = [tx1.hash] + self.manager.get_new_tx_parents()[1:] self.assertIn(tx1.hash, tx2.parents) tx2.timestamp = int(self.clock.seconds()) + 1 - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) self.assertTrue(self.manager.propagate_tx(tx2, False)) if self.manager.tx_storage.indexes.mempool_tips is not None: self.assertEqual( @@ -123,7 +115,7 @@ def test_tx_tips_voided(self): # tx3.timestamp = tx2.timestamp + 1 tx3.parents = tx1.parents # self.assertIn(tx1.hash, tx3.parents) - tx3.resolve() + self.manager.cpu_mining_service.resolve(tx3) self.assertNotEqual(tx2.hash, tx3.hash) self.assertTrue(self.manager.propagate_tx(tx3, False)) # self.assertIn(tx3.hash, tx2.get_metadata().voided_by) @@ 
-144,169 +136,6 @@ def test_genesis_not_in_mempool(self): for tx in self.genesis_txs: self.assertNotIn(tx, mempool_txs) - def _test_confirmed_tx_that_spends_unconfirmed_tx(self, debug=False): - """ - B ────╮────╮ - A ───vv v - C ~~> D -> E - - debug=True is only useful to debug the base and dag setup, it will break the test - """ - from hathor.transaction import Block, TxInput, TxOutput - from hathor.transaction.scripts import P2PKH - from hathor.wallet.base_wallet import WalletOutputInfo - - # --- - # BEGIN SETUP BASE - # make some outputs to be spent by A, B and C, and also save some addresses blocks/txs to be used later - add_new_blocks(self.manager, 5, advance_clock=15) - block0 = add_blocks_unlock_reward(self.manager)[-1] - self.wallet.unlock(b'123') - self.wallet.generate_keys() - address = list(self.wallet.keys.keys())[0] - baddress = decode_address(address) - private_key = self.wallet.get_private_key(address) - tx0 = self.manager.wallet.prepare_transaction_compute_inputs( - Transaction, - [ - WalletOutputInfo(address=baddress, value=10, timelock=None), - WalletOutputInfo(address=baddress, value=10, timelock=None), - WalletOutputInfo(address=baddress, value=10, timelock=None), - ], - self.manager.tx_storage, - ) - tx0.weight = 1.0 - tx0.parents = self.manager.get_new_tx_parents() - tx0.timestamp = int(self.clock.seconds()) - tx0.resolve() - # XXX: tx0.outputs[0] is always the change output for some reason - self.assertEqual(len(tx0.outputs), 4) - self.assertEqual(tx0.outputs[1], tx0.outputs[2]) - self.assertEqual(tx0.outputs[1], tx0.outputs[3]) - self.assertTrue(self.manager.propagate_tx(tx0, False)) - parents0 = [tx0.hash, tx0.parents[0]] - # END SETUP BASE - - # --- - # BEGIN SETUP DAG - # tx_A: ordinary transaction - self.tx_A = Transaction( - timestamp=(tx0.timestamp + 1), - weight=1.0, - inputs=[TxInput(tx0.hash, 1, b'')], - outputs=[TxOutput(10, P2PKH.create_output_script(baddress))], - parents=list(parents0), - storage=self.tx_storage, - ) - 
self.tx_A.inputs[0].data = P2PKH.create_input_data( - *self.wallet.get_input_aux_data(self.tx_A.get_sighash_all(), private_key) - ) - self.tx_A.resolve() - if debug: - self.assertTrue(self.manager.propagate_tx(self.tx_A, False)) - self.assertFalse(self.tx_A.get_metadata().voided_by) - - # tx_B: ordinary transaction, not related to tx_A - self.tx_B = Transaction( - timestamp=(tx0.timestamp + 1), - weight=1.0, - inputs=[TxInput(tx0.hash, 2, b'')], - outputs=[TxOutput(10, P2PKH.create_output_script(baddress))], - parents=list(parents0), - storage=self.tx_storage, - ) - self.tx_B.inputs[0].data = P2PKH.create_input_data( - *self.wallet.get_input_aux_data(self.tx_B.get_sighash_all(), private_key) - ) - self.tx_B.resolve() - if debug: - self.assertTrue(self.manager.propagate_tx(self.tx_B, False)) - self.assertFalse(self.tx_B.get_metadata().voided_by) - self.assertFalse(self.tx_A.get_metadata().conflict_with) - self.assertFalse(self.tx_B.get_metadata().conflict_with) - - # tx_C: tip transaction, not related to tx_A or tx_B, must not be the parent of any tx/block - self.tx_C = Transaction( - timestamp=(tx0.timestamp + 1), - weight=1.0, - inputs=[TxInput(tx0.hash, 3, b'')], - outputs=[TxOutput(10, P2PKH.create_output_script(baddress))], - parents=list(parents0), - storage=self.tx_storage, - ) - self.tx_C.inputs[0].data = P2PKH.create_input_data( - *self.wallet.get_input_aux_data(self.tx_C.get_sighash_all(), private_key) - ) - self.tx_C.resolve() - if debug: - self.assertTrue(self.manager.propagate_tx(self.tx_C, False)) - self.assertFalse(self.tx_C.get_metadata().voided_by) - self.assertFalse(self.tx_A.get_metadata().conflict_with) - self.assertFalse(self.tx_B.get_metadata().conflict_with) - self.assertFalse(self.tx_C.get_metadata().conflict_with) - - # tx_D: has tx_A and tx_B as parents, but spends from tx_C, confirmed by block_E - self.tx_D = Transaction( - timestamp=(self.tx_A.timestamp + 1), - weight=1.0, - inputs=[ - TxInput(self.tx_A.hash, 0, b''), - 
TxInput(self.tx_B.hash, 0, b''), - TxInput(self.tx_C.hash, 0, b''), - ], - outputs=[TxOutput(30, P2PKH.create_output_script(baddress))], - parents=[self.tx_A.hash, self.tx_B.hash], - storage=self.tx_storage, - ) - for i in range(3): - self.tx_D.inputs[i].data = P2PKH.create_input_data( - *self.wallet.get_input_aux_data(self.tx_D.get_sighash_all(), private_key) - ) - self.tx_D.resolve() - if debug: - self.assertTrue(self.manager.propagate_tx(self.tx_D, False)) - self.assertFalse(self.tx_D.get_metadata().voided_by) - - # block_E: has tx_D as parent (and also tx_A, to fill it up, but MUST NOT confirm tx_C - self.block_E = Block( - timestamp=(self.tx_D.timestamp + 1), - outputs=[TxOutput(6400, P2PKH.create_output_script(baddress))], - parents=[block0.hash, self.tx_D.hash, self.tx_B.hash], - weight=1.0, - storage=self.tx_storage, - ) - self.block_E.resolve() - if debug: - self.assertTrue(self.manager.propagate_tx(self.block_E, False)) - self.assertFalse(self.block_E.get_metadata().voided_by) - tips = [x.data for x in self.tx_storage.get_all_tips()] - self.assertEqual(set(tips), {self.tx_C.hash, self.block_E.hash}) - # END SETUP DAG - - # --- - # BEGIN TEST INDEX BEHAVIOR - # order of operations to simulate what will happen on sync-v2 and what we want to avoid: - deps_index = self.manager.tx_storage.indexes.deps - - # - add block_E to deps-index, it should then say tx_D and tx_B are needed - self.assertFalse(self.block_E.get_metadata().validation.is_fully_connected()) - deps_index.add_tx(self.block_E) - self.assertEqual( - set(deps_index._iter_needed_txs()), - {self.tx_D.hash, self.tx_B.hash}, - ) - - # - add tx_D to deps-index, it should now say tx_A, tx_B and most importantly tx_C are needed - self.assertFalse(self.tx_D.get_metadata().validation.is_fully_connected()) - deps_index.add_tx(self.tx_D) - deps_index.remove_from_needed_index(self.tx_D.hash) - # XXX: the next assert will fail when the index does not use tx.get_all_dependencies() - self.assertEqual( - 
set(deps_index._iter_needed_txs()), - {self.tx_A.hash, self.tx_B.hash, self.tx_C.hash}, - ) - # END TEST INDEX BEHAVIOR - def test_utxo_index_genesis(self): from hathor.indexes.utxo_index import UtxoIndexItem from tests.utils import GENESIS_ADDRESS_B58 @@ -438,7 +267,7 @@ def check_utxos(*args): block2.parents[1:] = [txA2.hash, txB2.hash] block2.timestamp = block1.timestamp block2.weight = 1.2 - block2.resolve() + self.manager.cpu_mining_service.resolve(block2) self.manager.verification_service.validate_full(block2) self.manager.propagate_tx(block2, fails_silently=False) self.graphviz.labels[block2.hash] = 'block2' @@ -658,7 +487,7 @@ def test_utxo_index_after_push_tx(self): tx1.inputs[0].data = P2PKH.create_input_data( *wallet.get_input_aux_data(tx1.get_sighash_all(), wallet.get_private_key(address)) ) - tx1.resolve() + self.manager.cpu_mining_service.resolve(tx1) self.assertTrue(self.manager.propagate_tx(tx1, False)) self.assertEqual( @@ -733,7 +562,7 @@ def test_utxo_index_last(self): tx1.inputs[0].data = P2PKH.create_input_data( *wallet.get_input_aux_data(tx1.get_sighash_all(), wallet.get_private_key(address)) ) - tx1.resolve() + self.manager.cpu_mining_service.resolve(tx1) self.assertTrue(self.manager.propagate_tx(tx1, False)) # querying for exact values @@ -924,38 +753,6 @@ class SyncV1MemoryIndexesTest(unittest.SyncV1Params, BaseMemoryIndexesTest): class SyncV2MemoryIndexesTest(unittest.SyncV2Params, BaseMemoryIndexesTest): __test__ = True - def test_deps_index(self) -> None: - from hathor.indexes.memory_deps_index import MemoryDepsIndex - - add_new_blocks(self.manager, 5, advance_clock=15) - add_blocks_unlock_reward(self.manager) - - # XXX: this test makes use of the internals of the memory deps-index implementation - deps_index: MemoryDepsIndex = self.manager.tx_storage.indexes.deps - - address = self.get_address(0) - value = 500 - tx = gen_new_tx(self.manager, address, value) - - # call add_tx the first time - deps_index.add_tx(tx) - - # snapshot of state 
before - rev_dep_index = deps_index._rev_dep_index.copy() - txs_with_deps_ready = deps_index._txs_with_deps_ready.copy() - needed_txs_index = deps_index._needed_txs_index.copy() - - # call add_tx the second time - deps_index.add_tx(tx) - - # state must not have changed - self.assertEqual(rev_dep_index, deps_index._rev_dep_index) - self.assertEqual(txs_with_deps_ready, deps_index._txs_with_deps_ready) - self.assertEqual(needed_txs_index, deps_index._needed_txs_index) - - def test_confirmed_tx_that_spends_unconfirmed_tx(self): - self._test_confirmed_tx_that_spends_unconfirmed_tx() - # sync-bridge should behave like sync-v2 class SyncBridgeMemoryIndexesTest(unittest.SyncBridgeParams, SyncV2MemoryIndexesTest): @@ -969,42 +766,6 @@ class SyncV1RocksDBIndexesTest(unittest.SyncV1Params, BaseRocksDBIndexesTest): class SyncV2RocksDBIndexesTest(unittest.SyncV2Params, BaseRocksDBIndexesTest): __test__ = True - def test_deps_index(self) -> None: - from hathor.indexes.rocksdb_deps_index import RocksDBDepsIndex - - indexes = self.manager.tx_storage.indexes - indexes.deps = RocksDBDepsIndex(indexes._db, _force=True) - - add_new_blocks(self.manager, 5, advance_clock=15) - add_blocks_unlock_reward(self.manager) - - # XXX: this test makes use of the internals of the rocksdb deps-index implementation - deps_index: RocksDBDepsIndex = self.manager.tx_storage.indexes.deps - - address = self.get_address(0) - value = 500 - tx = gen_new_tx(self.manager, address, value) - - # call add_tx the first time - deps_index.add_tx(tx) - - # snapshot of state before - db_dict_before = deps_index._clone_into_dict() - - # call add_tx the second time - deps_index.add_tx(tx) - - # state must not have changed - db_dict_after = deps_index._clone_into_dict() - self.assertEqual(db_dict_before, db_dict_after) - - def test_confirmed_tx_that_spends_unconfirmed_tx(self): - from hathor.indexes.rocksdb_deps_index import RocksDBDepsIndex - - indexes = self.manager.tx_storage.indexes - indexes.deps = 
RocksDBDepsIndex(indexes._db, _force=True) - self._test_confirmed_tx_that_spends_unconfirmed_tx() - # sync-bridge should behave like sync-v2 class SyncBridgeRocksDBIndexesTest(unittest.SyncBridgeParams, SyncV2RocksDBIndexesTest): diff --git a/tests/tx/test_indexes4.py b/tests/tx/test_indexes4.py index 2e9290c0d..cc0e726a3 100644 --- a/tests/tx/test_indexes4.py +++ b/tests/tx/test_indexes4.py @@ -1,9 +1,10 @@ from hathor.crypto.util import decode_address +from hathor.simulator.utils import add_new_blocks, gen_new_tx from hathor.transaction import Transaction from hathor.transaction.storage import TransactionMemoryStorage from hathor.wallet.base_wallet import WalletOutputInfo from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_blocks, gen_new_tx +from tests.utils import add_blocks_unlock_reward class BaseSimulatorIndexesTestCase(unittest.TestCase): @@ -29,7 +30,7 @@ def _build_randomized_blockchain(self, *, utxo_index=False): tx1.weight = 2.0 tx1.parents = manager.get_new_tx_parents() tx1.timestamp = int(self.clock.seconds()) - tx1.resolve() + manager.cpu_mining_service.resolve(tx1) assert manager.propagate_tx(tx1, False) tx2 = manager.wallet.prepare_transaction_compute_inputs(Transaction, outputs, manager.tx_storage) @@ -37,13 +38,13 @@ def _build_randomized_blockchain(self, *, utxo_index=False): tx2.parents = [tx1.hash] + manager.get_new_tx_parents()[1:] self.assertIn(tx1.hash, tx2.parents) tx2.timestamp = int(self.clock.seconds()) + 1 - tx2.resolve() + manager.cpu_mining_service.resolve(tx2) assert manager.propagate_tx(tx2, False) tx3 = Transaction.create_from_struct(tx2.get_struct()) tx3.weight = 3.0 tx3.parents = tx1.parents - tx3.resolve() + manager.cpu_mining_service.resolve(tx3) assert manager.propagate_tx(tx3, False) for _ in range(100): diff --git a/tests/tx/test_mining.py b/tests/tx/test_mining.py index 822231907..3c55bc527 100644 --- a/tests/tx/test_mining.py +++ b/tests/tx/test_mining.py @@ -2,10 +2,10 @@ from 
hathor.conf import HathorSettings from hathor.mining import BlockTemplate +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Block, sum_weights from hathor.transaction.storage import TransactionMemoryStorage from tests import unittest -from tests.utils import add_new_blocks settings = HathorSettings() @@ -37,13 +37,19 @@ def test_block_template_after_genesis(self) -> None: block_templates = manager.get_block_templates() self.assertEqual(len(block_templates), 1) + + timestamp_max = min( + 0xffffffff, + int(manager.reactor.seconds()) + self._settings.MAX_FUTURE_TIMESTAMP_ALLOWED + ) + self.assertEqual(block_templates[0], BlockTemplate( versions={0, 3}, reward=settings.INITIAL_TOKEN_UNITS_PER_BLOCK * 100, weight=1.0, timestamp_now=int(manager.reactor.seconds()), timestamp_min=settings.GENESIS_BLOCK_TIMESTAMP + 3, - timestamp_max=0xffffffff, # no limit for next block after genesis + timestamp_max=timestamp_max, # no limit for next block after genesis # parents=[tx.hash for tx in self.genesis_blocks + self.genesis_txs], parents=block_templates[0].parents, parents_any=[], @@ -60,13 +66,19 @@ def test_regular_block_template(self) -> None: block_templates = manager.get_block_templates() self.assertEqual(len(block_templates), 1) + + timestamp_max = min( + blocks[-1].timestamp + settings.MAX_DISTANCE_BETWEEN_BLOCKS - 1, + int(manager.reactor.seconds()) + self._settings.MAX_FUTURE_TIMESTAMP_ALLOWED + ) + self.assertEqual(block_templates[0], BlockTemplate( versions={0, 3}, reward=settings.INITIAL_TOKEN_UNITS_PER_BLOCK * 100, weight=1.0, timestamp_now=int(manager.reactor.seconds()), timestamp_min=blocks[-1].timestamp + 1, - timestamp_max=blocks[-1].timestamp + settings.MAX_DISTANCE_BETWEEN_BLOCKS - 1, + timestamp_max=timestamp_max, # parents=[blocks[-1].hash, self.genesis_txs[-1].hash, self.genesis_txs[-2].hash], parents=block_templates[0].parents, parents_any=[], diff --git a/tests/tx/test_multisig.py b/tests/tx/test_multisig.py index 
82e257052..e6a56105c 100644 --- a/tests/tx/test_multisig.py +++ b/tests/tx/test_multisig.py @@ -2,13 +2,14 @@ from hathor.conf import HathorSettings from hathor.crypto.util import decode_address, get_private_key_from_bytes, get_public_key_bytes_compressed +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TxInput, TxOutput from hathor.transaction.exceptions import ScriptError from hathor.transaction.scripts import P2PKH, MultiSig, create_output_script, parse_address_script, script_eval from hathor.wallet.base_wallet import WalletBalance, WalletOutputInfo from hathor.wallet.util import generate_multisig_address, generate_multisig_redeem_script, generate_signature from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_blocks +from tests.utils import add_blocks_unlock_reward settings = HathorSettings() @@ -72,7 +73,7 @@ def test_spend_multisig(self): tx1.weight = 10 tx1.parents = self.manager.get_new_tx_parents() tx1.timestamp = int(self.clock.seconds()) - tx1.resolve() + self.manager.cpu_mining_service.resolve(tx1) self.manager.propagate_tx(tx1) self.clock.advance(10) @@ -104,13 +105,13 @@ def test_spend_multisig(self): input_data = MultiSig.create_input_data(self.redeem_script, signatures) tx.inputs[0].data = input_data - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) # Transaction is still locked self.assertFalse(self.manager.propagate_tx(tx)) self.clock.advance(6) tx.timestamp = int(self.clock.seconds()) - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) # First we try to propagate with a P2PKH input private_key_obj = get_private_key_from_bytes(bytes.fromhex(self.private_keys[0]), password=b'1234') @@ -119,7 +120,7 @@ def test_spend_multisig(self): p2pkh_input_data = P2PKH.create_input_data(public_key_compressed, signatures[0]) tx2 = Transaction.create_from_struct(tx.get_struct()) tx2.inputs[0].data = p2pkh_input_data - tx2.resolve() + 
self.manager.cpu_mining_service.resolve(tx2) self.assertFalse(self.manager.propagate_tx(tx2)) # Now we propagate the correct diff --git a/tests/tx/test_prometheus.py b/tests/tx/test_prometheus.py index 8eaca8824..529b7e48f 100644 --- a/tests/tx/test_prometheus.py +++ b/tests/tx/test_prometheus.py @@ -6,8 +6,9 @@ import pytest from hathor.prometheus import PrometheusMetricsExporter +from hathor.simulator.utils import add_new_blocks from tests import unittest -from tests.utils import add_new_blocks, add_new_transactions +from tests.utils import add_new_transactions class BasePrometheusTest(unittest.TestCase): diff --git a/tests/tx/test_reward_lock.py b/tests/tx/test_reward_lock.py index ceb27b90f..14f709a69 100644 --- a/tests/tx/test_reward_lock.py +++ b/tests/tx/test_reward_lock.py @@ -2,13 +2,14 @@ from hathor.conf import HathorSettings from hathor.crypto.util import get_address_from_public_key +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TxInput, TxOutput from hathor.transaction.exceptions import RewardLocked from hathor.transaction.scripts import P2PKH from hathor.transaction.storage import TransactionMemoryStorage from hathor.wallet import Wallet from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_blocks, get_genesis_key +from tests.utils import add_blocks_unlock_reward, get_genesis_key settings = HathorSettings() @@ -37,7 +38,7 @@ def _add_reward_block(self): reward_block = self.manager.generate_mining_block( address=get_address_from_public_key(self.genesis_public_key) ) - reward_block.resolve() + self.manager.cpu_mining_service.resolve(reward_block) self.assertTrue(self.manager.propagate_tx(reward_block)) # XXX: calculate unlock height AFTER adding the block so the height is correctly calculated unlock_height = reward_block.get_metadata().height + settings.REWARD_SPEND_MIN_BLOCKS + 1 @@ -60,7 +61,7 @@ def _spend_reward_tx(self, manager, reward_block): data_to_sign = 
tx.get_sighash_all() public_bytes, signature = self.wallet.get_input_aux_data(data_to_sign, self.genesis_private_key) input_.data = P2PKH.create_input_data(public_bytes, signature) - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) tx.update_initial_metadata(save=False) return tx @@ -160,7 +161,7 @@ def test_mempool_tx_invalid_after_reorg(self): tb0 = self.manager.make_custom_block_template(block_to_replace.parents[0], block_to_replace.parents[1:]) b0 = tb0.generate_mining_block(self.manager.rng, storage=self.manager.tx_storage) b0.weight = 10 - b0.resolve() + self.manager.cpu_mining_service.resolve(b0) self.manager.verification_service.verify(b0) self.manager.propagate_tx(b0, fails_silently=False) @@ -187,7 +188,7 @@ def test_classic_reward_lock_timestamp_expected_to_fail(self): # be greater, so it'll fail tx = self._spend_reward_tx(self.manager, reward_block) tx.timestamp = blocks[-1].timestamp - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) self.assertEqual(tx.get_metadata().min_height, unlock_height) with self.assertRaises(RewardLocked): self.manager.verification_service.verify(tx) diff --git a/tests/tx/test_scripts.py b/tests/tx/test_scripts.py index 38928beed..b6cf99566 100644 --- a/tests/tx/test_scripts.py +++ b/tests/tx/test_scripts.py @@ -1,4 +1,5 @@ import struct +from unittest.mock import Mock from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import ec @@ -23,18 +24,20 @@ MultiSig, Opcode, ScriptExtras, - binary_to_int, - count_sigops, create_base_script, create_output_script, +) +from hathor.transaction.scripts.construct import count_sigops, get_pushdata, get_sigops_count, re_compile +from hathor.transaction.scripts.execute import ( + binary_to_int, decode_opn, evaluate_final_stack, get_data_bytes, get_data_single_byte, get_data_value, - get_pushdata, get_script_op, - get_sigops_count, +) +from hathor.transaction.scripts.opcode import ( op_checkdatasig, op_checkmultisig, op_checksig, 
@@ -51,8 +54,8 @@ op_integer, op_pushdata, op_pushdata1, - re_compile, ) +from hathor.transaction.scripts.script_context import ScriptContext from hathor.transaction.storage import TransactionMemoryStorage from hathor.wallet import HDWallet from tests import unittest @@ -173,22 +176,22 @@ def test_pushdata1(self): def test_dup(self): with self.assertRaises(MissingStackItems): - op_dup([], log=[], extras=None) + op_dup(ScriptContext(stack=[], logs=[], extras=Mock())) stack = [1] - op_dup(stack, log=[], extras=None) + op_dup(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(stack[-1], stack[-2]) def test_equalverify(self): elem = b'a' with self.assertRaises(MissingStackItems): - op_equalverify([elem], log=[], extras=None) + op_equalverify(ScriptContext(stack=[elem], logs=[], extras=Mock())) # no exception should be raised - op_equalverify([elem, elem], log=[], extras=None) + op_equalverify(ScriptContext(stack=[elem, elem], logs=[], extras=Mock())) with self.assertRaises(EqualVerifyFailed): - op_equalverify([elem, b'aaaa'], log=[], extras=None) + op_equalverify(ScriptContext(stack=[elem, b'aaaa'], logs=[], extras=Mock())) def test_checksig_raise_on_uncompressed_pubkey(self): """ Uncompressed pubkeys shoud not be accepted, even if they solve the signature @@ -210,11 +213,11 @@ def test_checksig_raise_on_uncompressed_pubkey(self): # ScriptError if pubkey is not a valid compressed public key # with wrong signature with self.assertRaises(ScriptError): - op_checksig([b'123', pubkey_uncompressed], log=[], extras=None) + op_checksig(ScriptContext(stack=[b'123', pubkey_uncompressed], logs=[], extras=Mock())) # or with rigth one # this will make sure the signature is not made when parameters are wrong with self.assertRaises(ScriptError): - op_checksig([signature, pubkey_uncompressed], log=[], extras=None) + op_checksig(ScriptContext(stack=[signature, pubkey_uncompressed], logs=[], extras=Mock())) def test_checksig_check_for_compressed_pubkey(self): """ 
Compressed pubkeys bytes representation always start with a byte 2 or 3 @@ -223,19 +226,19 @@ def test_checksig_check_for_compressed_pubkey(self): """ # ScriptError if pubkey is not a public key but starts with 2 or 3 with self.assertRaises(ScriptError): - op_checksig([b'\x0233', b'\x0233'], log=[], extras=None) + op_checksig(ScriptContext(stack=[b'\x0233', b'\x0233'], logs=[], extras=Mock())) with self.assertRaises(ScriptError): - op_checksig([b'\x0321', b'\x0321'], log=[], extras=None) + op_checksig(ScriptContext(stack=[b'\x0321', b'\x0321'], logs=[], extras=Mock())) # ScriptError if pubkey does not start with 2 or 3 with self.assertRaises(ScriptError): - op_checksig([b'\x0123', b'\x0123'], log=[], extras=None) + op_checksig(ScriptContext(stack=[b'\x0123', b'\x0123'], logs=[], extras=Mock())) with self.assertRaises(ScriptError): - op_checksig([b'\x0423', b'\x0423'], log=[], extras=None) + op_checksig(ScriptContext(stack=[b'\x0423', b'\x0423'], logs=[], extras=Mock())) def test_checksig(self): with self.assertRaises(MissingStackItems): - op_checksig([1], log=[], extras=None) + op_checksig(ScriptContext(stack=[1], logs=[], extras=Mock())) block = self.genesis_blocks[0] @@ -250,15 +253,15 @@ def test_checksig(self): signature = self.genesis_private_key.sign(hashed_data, ec.ECDSA(hashes.SHA256())) pubkey_bytes = get_public_key_bytes_compressed(self.genesis_public_key) - extras = ScriptExtras(tx=tx, txin=None, spent_tx=None) + extras = ScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) # wrong signature puts False (0) on stack stack = [b'aaaaaaaaa', pubkey_bytes] - op_checksig(stack, log=[], extras=extras) + op_checksig(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(0, stack.pop()) stack = [signature, pubkey_bytes] - op_checksig(stack, log=[], extras=extras) + op_checksig(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(1, stack.pop()) def test_checksig_cache(self): @@ -275,22 +278,22 @@ def test_checksig_cache(self): signature 
= self.genesis_private_key.sign(hashed_data, ec.ECDSA(hashes.SHA256())) pubkey_bytes = get_public_key_bytes_compressed(self.genesis_public_key) - extras = ScriptExtras(tx=tx, txin=None, spent_tx=None) + extras = ScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) stack = [signature, pubkey_bytes] self.assertIsNone(tx._sighash_data_cache) - op_checksig(stack, log=[], extras=extras) + op_checksig(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertIsNotNone(tx._sighash_data_cache) self.assertEqual(1, stack.pop()) def test_hash160(self): with self.assertRaises(MissingStackItems): - op_hash160([], log=[], extras=None) + op_hash160(ScriptContext(stack=[], logs=[], extras=Mock())) elem = b'aaaaaaaa' hash160 = get_hash160(elem) stack = [elem] - op_hash160(stack, log=[], extras=None) + op_hash160(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(hash160, stack.pop()) def test_checkdatasig_raise_on_uncompressed_pubkey(self): @@ -313,27 +316,27 @@ def test_checkdatasig_raise_on_uncompressed_pubkey(self): # with wrong signature stack = [data, b'123', pubkey_uncompressed] with self.assertRaises(ScriptError): - op_checkdatasig(stack, log=[], extras=None) + op_checkdatasig(ScriptContext(stack=stack, logs=[], extras=Mock())) # or with rigth one # this will make sure the signature is not made when parameters are wrong stack = [data, signature, pubkey_uncompressed] with self.assertRaises(ScriptError): - op_checkdatasig(stack, log=[], extras=None) + op_checkdatasig(ScriptContext(stack=stack, logs=[], extras=Mock())) def test_checkdatasig_check_for_compressed_pubkey(self): # ScriptError if pubkey is not a public key but starts with 2 or 3 with self.assertRaises(ScriptError): - op_checkdatasig([b'\x0233', b'\x0233', b'\x0233'], log=[], extras=None) + op_checkdatasig(ScriptContext(stack=[b'\x0233', b'\x0233', b'\x0233'], logs=[], extras=Mock())) with self.assertRaises(ScriptError): - op_checkdatasig([b'\x0321', b'\x0321', b'\x0321'], log=[], extras=None) + 
op_checkdatasig(ScriptContext(stack=[b'\x0321', b'\x0321', b'\x0321'], logs=[], extras=Mock())) # ScriptError if pubkey is not a public key with self.assertRaises(ScriptError): - op_checkdatasig([b'\x0123', b'\x0123', b'\x0123'], log=[], extras=None) + op_checkdatasig(ScriptContext(stack=[b'\x0123', b'\x0123', b'\x0123'], logs=[], extras=Mock())) def test_checkdatasig(self): with self.assertRaises(MissingStackItems): - op_checkdatasig([1, 1], log=[], extras=None) + op_checkdatasig(ScriptContext(stack=[1, 1], logs=[], extras=Mock())) data = b'some_random_data' signature = self.genesis_private_key.sign(data, ec.ECDSA(hashes.SHA256())) @@ -341,12 +344,12 @@ def test_checkdatasig(self): stack = [data, signature, pubkey_bytes] # no exception should be raised and data is left on stack - op_checkdatasig(stack, log=[], extras=None) + op_checkdatasig(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(data, stack.pop()) stack = [b'data_not_matching', signature, pubkey_bytes] with self.assertRaises(OracleChecksigFailed): - op_checkdatasig(stack, log=[], extras=None) + op_checkdatasig(ScriptContext(stack=stack, logs=[], extras=Mock())) def test_get_data_value(self): value0 = b'value0' @@ -367,7 +370,7 @@ def test_get_data_value(self): def test_data_strequal(self): with self.assertRaises(MissingStackItems): - op_data_strequal([1, 1], log=[], extras=None) + op_data_strequal(ScriptContext(stack=[1, 1], logs=[], extras=Mock())) value0 = b'value0' value1 = b'vvvalue1' @@ -376,20 +379,20 @@ def test_data_strequal(self): data = (bytes([len(value0)]) + value0 + bytes([len(value1)]) + value1 + bytes([len(value2)]) + value2) stack = [data, 0, value0] - op_data_strequal(stack, log=[], extras=None) + op_data_strequal(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(stack.pop(), data) stack = [data, 1, value0] with self.assertRaises(VerifyFailed): - op_data_strequal(stack, log=[], extras=None) + op_data_strequal(ScriptContext(stack=stack, logs=[], 
extras=Mock())) stack = [data, b'\x00', value0] with self.assertRaises(VerifyFailed): - op_data_strequal(stack, log=[], extras=None) + op_data_strequal(ScriptContext(stack=stack, logs=[], extras=Mock())) def test_data_greaterthan(self): with self.assertRaises(MissingStackItems): - op_data_greaterthan([1, 1], log=[], extras=None) + op_data_greaterthan(ScriptContext(stack=[1, 1], logs=[], extras=Mock())) value0 = struct.pack('!I', 1000) value1 = struct.pack('!I', 1) @@ -397,93 +400,93 @@ def test_data_greaterthan(self): data = (bytes([len(value0)]) + value0 + bytes([len(value1)]) + value1) stack = [data, 0, struct.pack('!I', 999)] - op_data_greaterthan(stack, log=[], extras=None) + op_data_greaterthan(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(stack.pop(), data) stack = [data, 1, struct.pack('!I', 0)] - op_data_greaterthan(stack, log=[], extras=None) + op_data_greaterthan(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(stack.pop(), data) with self.assertRaises(VerifyFailed): stack = [data, 1, struct.pack('!I', 1)] - op_data_greaterthan(stack, log=[], extras=None) + op_data_greaterthan(ScriptContext(stack=stack, logs=[], extras=Mock())) stack = [data, 1, b'not_an_int'] with self.assertRaises(VerifyFailed): - op_data_greaterthan(stack, log=[], extras=None) + op_data_greaterthan(ScriptContext(stack=stack, logs=[], extras=Mock())) stack = [data, b'\x00', struct.pack('!I', 0)] with self.assertRaises(VerifyFailed): - op_data_greaterthan(stack, log=[], extras=None) + op_data_greaterthan(ScriptContext(stack=stack, logs=[], extras=Mock())) def test_data_match_interval(self): with self.assertRaises(MissingStackItems): - op_data_match_interval([1, b'2'], log=[], extras=None) + op_data_match_interval([1, b'2']) value0 = struct.pack('!I', 1000) data = (bytes([len(value0)]) + value0) stack = [data, 0, 'key1', struct.pack('!I', 1000), 'key2', struct.pack('!I', 1005), 'key3', bytes([2])] - op_data_match_interval(stack, log=[], 
extras=None) + op_data_match_interval(stack) self.assertEqual(stack.pop(), 'key1') self.assertEqual(len(stack), 0) stack = [data, 0, 'key1', struct.pack('!I', 100), 'key2', struct.pack('!I', 1005), 'key3', bytes([2])] - op_data_match_interval(stack, log=[], extras=None) + op_data_match_interval(stack) self.assertEqual(stack.pop(), 'key2') self.assertEqual(len(stack), 0) stack = [data, 0, 'key1', struct.pack('!I', 100), 'key2', struct.pack('!I', 900), 'key3', bytes([2])] - op_data_match_interval(stack, log=[], extras=None) + op_data_match_interval(stack) self.assertEqual(stack.pop(), 'key3') self.assertEqual(len(stack), 0) # missing 1 item on stack stack = [data, 0, struct.pack('!I', 100), 'key2', struct.pack('!I', 900), 'key3', bytes([2])] with self.assertRaises(MissingStackItems): - op_data_match_interval(stack, log=[], extras=None) + op_data_match_interval(stack) # value should be an integer stack = [data, 0, 'key1', struct.pack('!I', 100), 'key2', b'not_an_int', 'key3', bytes([2])] with self.assertRaises(VerifyFailed): - op_data_match_interval(stack, log=[], extras=None) + op_data_match_interval(stack) def test_data_match_value(self): with self.assertRaises(MissingStackItems): - op_data_match_value([1, b'2'], log=[], extras=None) + op_data_match_value(ScriptContext(stack=[1, b'2'], logs=[], extras=Mock())) value0 = struct.pack('!I', 1000) data = (bytes([len(value0)]) + value0) stack = [data, 0, 'key1', struct.pack('!I', 1000), 'key2', struct.pack('!I', 1005), 'key3', bytes([2])] - op_data_match_value(stack, log=[], extras=None) + op_data_match_value(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(stack.pop(), 'key2') self.assertEqual(len(stack), 0) stack = [data, 0, 'key1', struct.pack('!I', 999), 'key2', struct.pack('!I', 1000), 'key3', bytes([2])] - op_data_match_value(stack, log=[], extras=None) + op_data_match_value(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(stack.pop(), 'key3') self.assertEqual(len(stack), 0) # 
missing 1 item on stack stack = [data, 0, 'key1', struct.pack('!I', 1000), 'key2', struct.pack('!I', 1000), bytes([2])] with self.assertRaises(MissingStackItems): - op_data_match_value(stack, log=[], extras=None) + op_data_match_value(ScriptContext(stack=stack, logs=[], extras=Mock())) # no value matches stack = [data, 0, 'key1', struct.pack('!I', 999), 'key2', struct.pack('!I', 1111), 'key3', bytes([2])] - op_data_match_value(stack, log=[], extras=None) + op_data_match_value(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(stack.pop(), 'key1') self.assertEqual(len(stack), 0) # value should be an integer stack = [data, 0, 'key1', struct.pack('!I', 100), 'key2', b'not_an_int', 'key3', bytes([2])] with self.assertRaises(VerifyFailed): - op_data_match_value(stack, log=[], extras=None) + op_data_match_value(ScriptContext(stack=stack, logs=[], extras=Mock())) def test_find_p2pkh(self): with self.assertRaises(MissingStackItems): - op_find_p2pkh([], log=[], extras=None) + op_find_p2pkh(ScriptContext(stack=[], logs=[], extras=Mock())) addr1 = '15d14K5jMqsN2uwUEFqiPG5SoD7Vr1BfnH' addr2 = '1K35zJQeYrVzQAW7X3s7vbPKmngj5JXTBc' @@ -506,14 +509,14 @@ def test_find_p2pkh(self): stack = [genesis_address] tx = Transaction(outputs=[TxOutput(1, out_genesis)]) extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) - op_find_p2pkh(stack, log=[], extras=extras) + op_find_p2pkh(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(stack.pop(), 1) # several outputs and correct output among them stack = [genesis_address] tx = Transaction(outputs=[TxOutput(1, out1), TxOutput(1, out2), TxOutput(1, out_genesis), TxOutput(1, out3)]) extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) - op_find_p2pkh(stack, log=[], extras=extras) + op_find_p2pkh(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(stack.pop(), 1) # several outputs without correct amount output @@ -521,18 +524,18 @@ def test_find_p2pkh(self): tx = 
Transaction(outputs=[TxOutput(1, out1), TxOutput(1, out2), TxOutput(2, out_genesis), TxOutput(1, out3)]) extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) with self.assertRaises(VerifyFailed): - op_find_p2pkh(stack, log=[], extras=extras) + op_find_p2pkh(ScriptContext(stack=stack, logs=[], extras=extras)) # several outputs without correct address output stack = [genesis_address] tx = Transaction(outputs=[TxOutput(1, out1), TxOutput(1, out2), TxOutput(1, out3)]) extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) with self.assertRaises(VerifyFailed): - op_find_p2pkh(stack, log=[], extras=extras) + op_find_p2pkh(ScriptContext(stack=stack, logs=[], extras=extras)) def test_greaterthan_timestamp(self): with self.assertRaises(MissingStackItems): - op_greaterthan_timestamp([], log=[], extras=None) + op_greaterthan_timestamp(ScriptContext(stack=[], logs=[], extras=Mock())) timestamp = 1234567 @@ -540,23 +543,23 @@ def test_greaterthan_timestamp(self): tx = Transaction() stack = [struct.pack('!I', timestamp)] - extras = ScriptExtras(tx=tx, txin=None, spent_tx=None) + extras = ScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) with self.assertRaises(TimeLocked): tx.timestamp = timestamp - 1 - op_greaterthan_timestamp(list(stack), log=[], extras=extras) + op_greaterthan_timestamp(ScriptContext(stack=list(stack), logs=[], extras=extras)) with self.assertRaises(TimeLocked): tx.timestamp = timestamp - op_greaterthan_timestamp(list(stack), log=[], extras=extras) + op_greaterthan_timestamp(ScriptContext(stack=list(stack), logs=[], extras=extras)) tx.timestamp = timestamp + 1 - op_greaterthan_timestamp(stack, log=[], extras=extras) + op_greaterthan_timestamp(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(len(stack), 0) def test_checkmultisig(self): with self.assertRaises(MissingStackItems): - op_checkmultisig([], log=[], extras=None) + op_checkmultisig(ScriptContext(stack=[], logs=[], extras=Mock())) block = self.genesis_blocks[0] @@ -566,7 
+569,7 @@ def test_checkmultisig(self): tx = Transaction(inputs=[txin], outputs=[txout]) data_to_sign = tx.get_sighash_all() - extras = ScriptExtras(tx=tx, txin=None, spent_tx=None) + extras = ScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) wallet = HDWallet() wallet._manually_initialize() @@ -595,107 +598,107 @@ def test_checkmultisig(self): stack = [ keys[0]['signature'], keys[2]['signature'], 2, keys[0]['pubkey'], keys[1]['pubkey'], keys[2]['pubkey'], 3 ] - op_checkmultisig(stack, log=[], extras=extras) + op_checkmultisig(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(1, stack.pop()) # New set of valid signatures stack = [ keys[0]['signature'], keys[1]['signature'], 2, keys[0]['pubkey'], keys[1]['pubkey'], keys[2]['pubkey'], 3 ] - op_checkmultisig(stack, log=[], extras=extras) + op_checkmultisig(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(1, stack.pop()) # Changing the signatures but they match stack = [ keys[1]['signature'], keys[2]['signature'], 2, keys[0]['pubkey'], keys[1]['pubkey'], keys[2]['pubkey'], 3 ] - op_checkmultisig(stack, log=[], extras=extras) + op_checkmultisig(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(1, stack.pop()) # Signatures are valid but in wrong order stack = [ keys[1]['signature'], keys[0]['signature'], 2, keys[0]['pubkey'], keys[1]['pubkey'], keys[2]['pubkey'], 3 ] - op_checkmultisig(stack, log=[], extras=extras) + op_checkmultisig(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(0, stack.pop()) # Adding wrong signature, so we get error stack = [ keys[0]['signature'], wrong_key['signature'], 2, keys[0]['pubkey'], keys[1]['pubkey'], keys[2]['pubkey'], 3 ] - op_checkmultisig(stack, log=[], extras=extras) + op_checkmultisig(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(0, stack.pop()) # Adding same signature twice, so we get error stack = [ keys[0]['signature'], keys[0]['signature'], 2, keys[0]['pubkey'], 
keys[1]['pubkey'], keys[2]['pubkey'], 3 ] - op_checkmultisig(stack, log=[], extras=extras) + op_checkmultisig(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(0, stack.pop()) # Adding less signatures than required, so we get error stack = [keys[0]['signature'], 2, keys[0]['pubkey'], keys[1]['pubkey'], keys[2]['pubkey'], 3] with self.assertRaises(MissingStackItems): - op_checkmultisig(stack, log=[], extras=extras) + op_checkmultisig(ScriptContext(stack=stack, logs=[], extras=extras)) # Quantity of signatures is more than it should stack = [ keys[0]['signature'], keys[1]['signature'], 3, keys[0]['pubkey'], keys[1]['pubkey'], keys[2]['pubkey'], 3 ] with self.assertRaises(MissingStackItems): - op_checkmultisig(stack, log=[], extras=extras) + op_checkmultisig(ScriptContext(stack=stack, logs=[], extras=extras)) # Quantity of pubkeys is more than it should stack = [ keys[0]['signature'], keys[1]['signature'], 2, keys[0]['pubkey'], keys[1]['pubkey'], keys[2]['pubkey'], 4 ] with self.assertRaises(InvalidStackData): - op_checkmultisig(stack, log=[], extras=extras) + op_checkmultisig(ScriptContext(stack=stack, logs=[], extras=extras)) # Exception pubkey_count should be integer stack = [ keys[0]['signature'], keys[1]['signature'], 2, keys[0]['pubkey'], keys[1]['pubkey'], keys[2]['pubkey'], '3' ] with self.assertRaises(InvalidStackData): - op_checkmultisig(stack, log=[], extras=extras) + op_checkmultisig(ScriptContext(stack=stack, logs=[], extras=extras)) # Exception not enough pub keys stack = [keys[0]['pubkey'], keys[1]['pubkey'], 3] with self.assertRaises(MissingStackItems): - op_checkmultisig(stack, log=[], extras=extras) + op_checkmultisig(ScriptContext(stack=stack, logs=[], extras=extras)) # Exception stack empty after pubkeys stack = [keys[0]['pubkey'], keys[1]['pubkey'], keys[2]['pubkey'], 3] with self.assertRaises(MissingStackItems): - op_checkmultisig(stack, log=[], extras=extras) + op_checkmultisig(ScriptContext(stack=stack, logs=[], 
extras=extras)) def test_equal(self): elem = b'a' with self.assertRaises(MissingStackItems): - op_equal([elem], log=[], extras=None) + op_equal(ScriptContext(stack=[elem], logs=[], extras=Mock())) # no exception should be raised stack = [elem, elem] - op_equal(stack, log=[], extras=None) + op_equal(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(stack.pop(), 1) stack = [elem, b'aaaa'] - op_equal(stack, log=[], extras=None) + op_equal(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(stack.pop(), 0) def test_integer_opcode(self): # We have opcodes from OP_0 to OP_16 for i in range(0, 17): stack = [] - op_integer(getattr(Opcode, 'OP_{}'.format(i)), stack, [], None) + op_integer(getattr(Opcode, 'OP_{}'.format(i)), stack) self.assertEqual(stack, [i]) stack = [] with self.assertRaises(ScriptError): - op_integer(0, stack, [], None) + op_integer(0, stack) with self.assertRaises(ScriptError): - op_integer(0x61, stack, [], None) + op_integer(0x61, stack) def test_decode_opn(self): for i in range(0, 17): diff --git a/tests/tx/test_stratum.py b/tests/tx/test_stratum.py index f059aacb8..47331a341 100644 --- a/tests/tx/test_stratum.py +++ b/tests/tx/test_stratum.py @@ -256,7 +256,7 @@ def setUp(self): storage = TransactionMemoryStorage() self.block = storage.get_transaction(self._settings.GENESIS_BLOCK_HASH) self.transport = StringTransportWithDisconnection() - self.protocol = StratumClient() + self.protocol = StratumClient(reactor=self.clock) self.protocol.makeConnection(self.transport) self.job_request_params = { 'data': self.block.get_header_without_nonce().hex(), diff --git a/tests/tx/test_timelock.py b/tests/tx/test_timelock.py index ed85a9396..711d46cff 100644 --- a/tests/tx/test_timelock.py +++ b/tests/tx/test_timelock.py @@ -1,10 +1,11 @@ from hathor.conf import HathorSettings from hathor.crypto.util import decode_address +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction from 
hathor.wallet.base_wallet import WalletBalance, WalletInputInfo, WalletOutputInfo from hathor.wallet.exceptions import InsufficientFunds from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_blocks +from tests.utils import add_blocks_unlock_reward settings = HathorSettings() @@ -41,7 +42,7 @@ def test_timelock(self): tx1.weight = 10 tx1.parents = self.manager.get_new_tx_parents() tx1.timestamp = int(self.clock.seconds()) - tx1.resolve() + self.manager.cpu_mining_service.resolve(tx1) self.manager.propagate_tx(tx1) self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], @@ -60,7 +61,7 @@ def test_timelock(self): tx2.weight = 10 tx2.parents = self.manager.get_new_tx_parents() tx2.timestamp = int(self.clock.seconds()) - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) propagated = self.manager.propagate_tx(tx2) self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], @@ -80,7 +81,7 @@ def test_timelock(self): tx3.weight = 10 tx3.parents = self.manager.get_new_tx_parents() tx3.timestamp = int(self.clock.seconds()) - tx3.resolve() + self.manager.cpu_mining_service.resolve(tx3) propagated = self.manager.propagate_tx(tx3, False) self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], WalletBalance(500, sum(blocks_tokens) - 500 - 700)) @@ -100,7 +101,7 @@ def test_timelock(self): tx4.weight = 10 tx4.parents = self.manager.get_new_tx_parents() tx4.timestamp = int(self.clock.seconds()) - tx4.resolve() + self.manager.cpu_mining_service.resolve(tx4) propagated = self.manager.propagate_tx(tx4, False) self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], WalletBalance(500, sum(blocks_tokens[:3]))) @@ -108,7 +109,7 @@ def test_timelock(self): self.clock.advance(8) tx2.timestamp = int(self.clock.seconds()) - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) propagated = self.manager.propagate_tx(tx2, False) 
self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], WalletBalance(0, sum(blocks_tokens[:3]))) @@ -131,7 +132,7 @@ def test_choose_inputs(self): tx1.weight = 10 tx1.parents = self.manager.get_new_tx_parents() tx1.timestamp = int(self.clock.seconds()) - tx1.resolve() + self.manager.cpu_mining_service.resolve(tx1) self.manager.propagate_tx(tx1) self.clock.advance(1) @@ -149,7 +150,7 @@ def test_choose_inputs(self): tx2.weight = 10 tx2.parents = self.manager.get_new_tx_parents() tx2.timestamp = int(self.clock.seconds()) - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) self.manager.propagate_tx(tx2) self.assertEqual(self.manager.wallet.balance[settings.HATHOR_TOKEN_UID], diff --git a/tests/tx/test_tips.py b/tests/tx/test_tips.py index 6242bebf5..9fbc0af46 100644 --- a/tests/tx/test_tips.py +++ b/tests/tx/test_tips.py @@ -1,12 +1,7 @@ +from hathor.simulator.utils import add_new_block, add_new_blocks from hathor.transaction import Transaction from tests import unittest -from tests.utils import ( - add_blocks_unlock_reward, - add_new_block, - add_new_blocks, - add_new_double_spending, - add_new_transactions, -) +from tests.utils import add_blocks_unlock_reward, add_new_double_spending, add_new_transactions class BaseTipsTestCase(unittest.TestCase): @@ -53,7 +48,7 @@ def test_tips_winner(self): tx3 = Transaction.create_from_struct(tx2.get_struct()) tx3.parents = [tx2.parents[1], tx2.parents[0]] - tx3.resolve() + self.manager.cpu_mining_service.resolve(tx3) # Propagate a conflicting twin transaction with tx2 self.manager.propagate_tx(tx3) @@ -69,7 +64,7 @@ def test_tips_winner(self): # Creating a new block that confirms tx3, then is will become valid and voiding tx2 new_block = add_new_block(self.manager, propagate=False) new_block.parents = [new_block.parents[0], tx1.hash, tx3.hash] - new_block.resolve() + self.manager.cpu_mining_service.resolve(new_block) self.manager.verification_service.verify(new_block) 
self.manager.propagate_tx(new_block, fails_silently=False) @@ -138,7 +133,7 @@ def test_tips_twin(self): # A new tx with custom parents, so tx3 and tx4 will become two tips tx4 = add_new_transactions(self.manager, 1, advance_clock=1, propagate=False)[0] tx4.parents = [tx1.hash, tx2.hash] - tx4.resolve() + self.manager.cpu_mining_service.resolve(tx4) self.manager.propagate_tx(tx4, fails_silently=False) self.manager.reactor.advance(10) self.assertCountEqual(self.get_tips(), set([tx4.hash, tx3.hash])) @@ -146,7 +141,7 @@ def test_tips_twin(self): # A twin tx with tx4, that will be voided initially, then won't change the tips tx5 = Transaction.create_from_struct(tx4.get_struct()) tx5.parents = [tx2.hash, tx3.hash] - tx5.resolve() + self.manager.cpu_mining_service.resolve(tx5) self.manager.propagate_tx(tx5) self.manager.reactor.advance(10) @@ -158,7 +153,7 @@ def test_tips_twin(self): # add new tx confirming tx5, which will become valid and tx4 becomes voided tx6 = add_new_transactions(self.manager, 1, advance_clock=1, propagate=False)[0] tx6.parents = [tx5.hash, tx2.hash] - tx6.resolve() + self.manager.cpu_mining_service.resolve(tx6) self.manager.propagate_tx(tx6, fails_silently=False) self.manager.reactor.advance(10) self.assertIsNotNone(tx4.get_metadata(force_reload=True).voided_by) diff --git a/tests/tx/test_tokens.py b/tests/tx/test_tokens.py index b3b036f2f..f4626e3f8 100644 --- a/tests/tx/test_tokens.py +++ b/tests/tx/test_tokens.py @@ -52,7 +52,7 @@ def test_tokens_in_block(self): weight=1, # low weight so we don't waste time with PoW storage=self.manager.tx_storage) - block.resolve() + self.manager.cpu_mining_service.resolve(block) with self.assertRaises(BlockWithTokensError): self.manager.verification_service.verify(block) @@ -72,7 +72,7 @@ def test_tx_token_outputs(self): data_to_sign = tx.get_sighash_all() public_bytes, signature = self.manager.wallet.get_input_aux_data(data_to_sign, self.genesis_private_key) tx.inputs[0].data = 
P2PKH.create_input_data(public_bytes, signature) - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InvalidToken): self.manager.verification_service.verify(tx) @@ -82,7 +82,7 @@ def test_tx_token_outputs(self): data_to_sign = tx.get_sighash_all() public_bytes, signature = self.manager.wallet.get_input_aux_data(data_to_sign, self.genesis_private_key) tx.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InvalidToken): self.manager.verification_service.verify(tx) @@ -92,7 +92,7 @@ def test_tx_token_outputs(self): data_to_sign = tx.get_sighash_all() public_bytes, signature = self.manager.wallet.get_input_aux_data(data_to_sign, self.genesis_private_key) tx.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InvalidToken): self.manager.verification_service.verify(tx) @@ -113,7 +113,7 @@ def test_token_transfer(self): data_to_sign = tx2.get_sighash_all() public_bytes, signature = wallet.get_input_aux_data(data_to_sign, wallet.get_private_key(self.address_b58)) tx2.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) self.manager.verification_service.verify(tx2) # missing tokens @@ -123,7 +123,7 @@ def test_token_transfer(self): data_to_sign = tx3.get_sighash_all() public_bytes, signature = wallet.get_input_aux_data(data_to_sign, wallet.get_private_key(self.address_b58)) tx3.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) - tx3.resolve() + self.manager.cpu_mining_service.resolve(tx3) with self.assertRaises(InputOutputMismatch): self.manager.verification_service.verify(tx3) @@ -156,7 +156,7 @@ def test_token_mint(self): data = P2PKH.create_input_data(public_bytes, signature) tx2.inputs[0].data = data tx2.inputs[1].data = data - tx2.resolve() + 
self.manager.cpu_mining_service.resolve(tx2) self.manager.verification_service.verify(tx2) self.manager.propagate_tx(tx2) self.run_to_completion() @@ -191,7 +191,7 @@ def test_token_mint(self): public_bytes, signature = wallet.get_input_aux_data(data_to_sign, wallet.get_private_key(self.address_b58)) data = P2PKH.create_input_data(public_bytes, signature) tx3.inputs[0].data = data - tx3.resolve() + self.manager.cpu_mining_service.resolve(tx3) with self.assertRaises(InputOutputMismatch): self.manager.verification_service.verify(tx3) @@ -217,7 +217,7 @@ def test_token_mint(self): data = P2PKH.create_input_data(public_bytes, signature) tx4.inputs[0].data = data tx4.inputs[1].data = data - tx4.resolve() + self.manager.cpu_mining_service.resolve(tx4) with self.assertRaises(InputOutputMismatch): self.manager.verification_service.verify(tx4) @@ -229,7 +229,7 @@ def test_token_mint(self): data_to_sign = tx5.get_sighash_all() public_bytes, signature = wallet.get_input_aux_data(data_to_sign, wallet.get_private_key(self.address_b58)) tx5.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) - tx5.resolve() + self.manager.cpu_mining_service.resolve(tx5) with self.assertRaises(InputOutputMismatch): self.manager.verification_service.verify(tx5) @@ -263,7 +263,7 @@ def test_token_melt(self): data = P2PKH.create_input_data(public_bytes, signature) tx2.inputs[0].data = data tx2.inputs[1].data = data - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) self.manager.verification_service.verify(tx2) self.manager.propagate_tx(tx2) self.run_to_completion() @@ -302,7 +302,7 @@ def test_token_melt(self): data = P2PKH.create_input_data(public_bytes, signature) tx3.inputs[0].data = data tx3.inputs[1].data = data - tx3.resolve() + self.manager.cpu_mining_service.resolve(tx3) with self.assertRaises(InputOutputMismatch): self.manager.verification_service.verify(tx3) @@ -317,7 +317,7 @@ def test_token_melt(self): data = P2PKH.create_input_data(public_bytes, signature) 
tx4.inputs[0].data = data tx4.inputs[1].data = data - tx4.resolve() + self.manager.cpu_mining_service.resolve(tx4) with self.assertRaises(InputOutputMismatch): self.manager.verification_service.verify(tx4) @@ -336,7 +336,7 @@ def test_token_transfer_authority(self): data_to_sign = tx2.get_sighash_all() public_bytes, signature = wallet.get_input_aux_data(data_to_sign, wallet.get_private_key(self.address_b58)) tx2.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) with self.assertRaises(InvalidToken): self.manager.verification_service.verify(tx2) @@ -348,7 +348,7 @@ def test_token_transfer_authority(self): data_to_sign = tx3.get_sighash_all() public_bytes, signature = wallet.get_input_aux_data(data_to_sign, wallet.get_private_key(self.address_b58)) tx3.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) - tx3.resolve() + self.manager.cpu_mining_service.resolve(tx3) with self.assertRaises(InvalidToken): self.manager.verification_service.verify(tx3) @@ -402,7 +402,7 @@ def test_token_index_with_conflict(self, mint_amount=0): tx2.inputs[0].data = data tx2.inputs[1].data = data tx2.inputs[2].data = data - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) self.manager.verification_service.verify(tx2) self.manager.propagate_tx(tx2) self.run_to_completion() @@ -422,7 +422,7 @@ def test_token_index_with_conflict(self, mint_amount=0): tx3 = Transaction.create_from_struct(tx2.get_struct()) tx3.parents = [tx.parents[1], tx.parents[0]] tx3.weight = 3 - tx3.resolve() + self.manager.cpu_mining_service.resolve(tx3) self.assertNotEqual(tx3.hash, tx2.hash) self.assertTrue(tx3.weight > tx2.weight) self.manager.propagate_tx(tx3) @@ -447,7 +447,7 @@ def update_tx(tx): data_to_sign = tx.get_sighash_all() public_bytes, signature = self.manager.wallet.get_input_aux_data(data_to_sign, self.genesis_private_key) tx.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) - 
tx.resolve() + self.manager.cpu_mining_service.resolve(tx) # test token name and symbol tx = create_tokens(self.manager, self.address_b58) @@ -540,7 +540,7 @@ def test_unknown_authority(self): data = P2PKH.create_input_data(public_bytes, signature) tx2.inputs[0].data = data tx2.inputs[1].data = data - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) with self.assertRaises(InvalidToken): self.manager.verification_service.verify(tx2) @@ -593,7 +593,7 @@ def test_block_with_htr_authority(self): weight=1, # low weight so we don't waste time with PoW storage=self.manager.tx_storage) - block.resolve() + self.manager.cpu_mining_service.resolve(block) with self.assertRaises(InvalidToken): self.manager.verification_service.verify(block) diff --git a/tests/tx/test_traversal.py b/tests/tx/test_traversal.py index a4ca58732..9f730c545 100644 --- a/tests/tx/test_traversal.py +++ b/tests/tx/test_traversal.py @@ -1,8 +1,9 @@ from math import inf +from hathor.simulator.utils import add_new_blocks from hathor.transaction.storage.traversal import BFSOrderWalk, BFSTimestampWalk, DFSWalk from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_blocks, add_new_transactions, add_new_tx +from tests.utils import add_blocks_unlock_reward, add_new_transactions, add_new_tx class _TraversalTestCase(unittest.TestCase): diff --git a/tests/tx/test_tx.py b/tests/tx/test_tx.py index 96cc51ce2..fd802c7f5 100644 --- a/tests/tx/test_tx.py +++ b/tests/tx/test_tx.py @@ -2,9 +2,9 @@ import hashlib from math import isinf, isnan -from hathor import daa from hathor.crypto.util import decode_address, get_address_from_public_key, get_private_key_from_bytes -from hathor.daa import TestMode, _set_test_mode +from hathor.daa import TestMode +from hathor.simulator.utils import add_new_blocks from hathor.transaction import MAX_OUTPUT_VALUE, Block, Transaction, TxInput, TxOutput from hathor.transaction.exceptions import ( BlockWithInputs, @@ -32,13 +32,7 @@ from 
hathor.transaction.validation_state import ValidationState from hathor.wallet import Wallet from tests import unittest -from tests.utils import ( - add_blocks_unlock_reward, - add_new_blocks, - add_new_transactions, - create_script_with_sigops, - get_genesis_key, -) +from tests.utils import add_blocks_unlock_reward, add_new_transactions, create_script_with_sigops, get_genesis_key class BaseTransactionTest(unittest.TestCase): @@ -50,6 +44,7 @@ def setUp(self): # this makes sure we can spend the genesis outputs self.manager = self.create_peer('testnet', unlock_wallet=True, wallet_index=True, use_memory_storage=True) + self._verifiers = self.manager.verification_service.verifiers self.tx_storage = self.manager.tx_storage # read genesis keys @@ -80,7 +75,7 @@ def test_input_output_match(self): _input.data = P2PKH.create_input_data(public_bytes, signature) with self.assertRaises(InputOutputMismatch): - tx.verify_sum() + self._verifiers.tx.verify_sum(tx.get_complete_token_info()) def test_validation(self): # add 100 blocks and check that walking through get_next_block_best_chain yields the same blocks @@ -120,7 +115,7 @@ def test_script(self): _input.data = data_wrong with self.assertRaises(InvalidInputData): - tx.verify_inputs() + self._verifiers.tx.verify_inputs(tx) def test_too_many_inputs(self): random_bytes = bytes.fromhex('0000184e64683b966b4268f387c269915cc61f6af5329823a93e3696cb0fe902') @@ -131,13 +126,13 @@ def test_too_many_inputs(self): tx = Transaction(inputs=inputs, storage=self.tx_storage) with self.assertRaises(TooManyInputs): - tx.verify_number_of_inputs() + self._verifiers.tx.verify_number_of_inputs(tx) def test_no_inputs(self): tx = Transaction(inputs=[], storage=self.tx_storage) with self.assertRaises(NoInputError): - tx.verify_number_of_inputs() + self._verifiers.tx.verify_number_of_inputs(tx) def test_too_many_outputs(self): random_bytes = bytes.fromhex('0000184e64683b966b4268f387c269915cc61f6af5329823a93e3696cb0fe902') @@ -148,7 +143,7 @@ def 
test_too_many_outputs(self): tx = Transaction(outputs=outputs, storage=self.tx_storage) with self.assertRaises(TooManyOutputs): - tx.verify_number_of_outputs() + self._verifiers.vertex.verify_number_of_outputs(tx) def _gen_tx_spending_genesis_block(self): parents = [tx.hash for tx in self.genesis_txs] @@ -216,7 +211,7 @@ def test_block_inputs(self): block.inputs = tx_inputs - block.resolve() + self.manager.cpu_mining_service.resolve(block) with self.assertRaises(BlockWithInputs): self.manager.verification_service.verify(block) @@ -246,11 +241,11 @@ def test_merge_mined_no_magic(self): ) with self.assertRaises(AuxPowNoMagicError): - b.verify_aux_pow() + self._verifiers.merge_mined_block.verify_aux_pow(b) # adding the MAGIC_NUMBER makes it work: b.aux_pow = b.aux_pow._replace(coinbase_head=b.aux_pow.coinbase_head + MAGIC_NUMBER) - b.verify_aux_pow() + self._verifiers.merge_mined_block.verify_aux_pow(b) def test_merge_mined_multiple_magic(self): from hathor.merged_mining import MAGIC_NUMBER @@ -312,9 +307,9 @@ def test_merge_mined_multiple_magic(self): assert bytes(b1) != bytes(b2) assert b1.calculate_hash() == b2.calculate_hash() - b1.verify_aux_pow() # OK + self._verifiers.merge_mined_block.verify_aux_pow(b1) # OK with self.assertRaises(AuxPowUnexpectedMagicError): - b2.verify_aux_pow() + self._verifiers.merge_mined_block.verify_aux_pow(b2) def test_merge_mined_long_merkle_path(self): from hathor.merged_mining import MAGIC_NUMBER @@ -341,11 +336,11 @@ def test_merge_mined_long_merkle_path(self): ) with self.assertRaises(AuxPowLongMerklePathError): - b.verify_aux_pow() + self._verifiers.merge_mined_block.verify_aux_pow(b) # removing one path makes it work b.aux_pow.merkle_path.pop() - b.verify_aux_pow() + self._verifiers.merge_mined_block.verify_aux_pow(b) def test_block_outputs(self): from hathor.transaction.exceptions import TooManyOutputs @@ -365,7 +360,7 @@ def test_block_outputs(self): storage=self.tx_storage) with self.assertRaises(TooManyOutputs): - 
block.verify_outputs() + self._verifiers.vertex.verify_outputs(block) def test_tx_number_parents(self): genesis_block = self.genesis_blocks[0] @@ -386,21 +381,21 @@ def test_tx_number_parents(self): tx.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) # in first test, only with 1 parent - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(IncorrectParents): self.manager.verification_service.verify(tx) # test with 3 parents parents = [tx.hash for tx in self.genesis] tx.parents = parents - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(IncorrectParents): self.manager.verification_service.verify(tx) # 2 parents, 1 tx and 1 block parents = [self.genesis_txs[0].hash, self.genesis_blocks[0].hash] tx.parents = parents - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(IncorrectParents): self.manager.verification_service.verify(tx) @@ -419,7 +414,7 @@ def test_block_unknown_parent(self): weight=1, # low weight so we don't waste time with PoW storage=self.tx_storage) - block.resolve() + self.manager.cpu_mining_service.resolve(block) with self.assertRaises(ParentDoesNotExist): self.manager.verification_service.verify(block) @@ -437,7 +432,7 @@ def test_block_number_parents(self): weight=1, # low weight so we don't waste time with PoW storage=self.tx_storage) - block.resolve() + self.manager.cpu_mining_service.resolve(block) with self.assertRaises(IncorrectParents): self.manager.verification_service.verify(block) @@ -460,7 +455,7 @@ def test_tx_inputs_out_of_range(self): tx.inputs[0].data = data # test with an inexistent index - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InexistentInput): self.manager.verification_service.verify(tx) @@ -468,7 +463,7 @@ def test_tx_inputs_out_of_range(self): _input = [TxInput(genesis_block.hash, len(genesis_block.outputs), data)] tx.inputs = _input # test with an inexistent index - 
tx.resolve() + self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InexistentInput): self.manager.verification_service.verify(tx) @@ -476,7 +471,7 @@ def test_tx_inputs_out_of_range(self): random_bytes = bytes.fromhex('0000184e64683b966b4268f387c269915cc61f6af5329823a93e3696cb0fe902') _input = [TxInput(random_bytes, 3, data)] tx.inputs = _input - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InexistentInput): self.manager.verification_service.verify(tx) @@ -499,7 +494,7 @@ def test_tx_inputs_conflict(self): public_bytes, signature = self.wallet.get_input_aux_data(data_to_sign, self.genesis_private_key) _input.data = P2PKH.create_input_data(public_bytes, signature) - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(ConflictingInputs): self.manager.verification_service.verify(tx) @@ -521,7 +516,7 @@ def test_regular_tx(self): public_bytes, signature = self.wallet.get_input_aux_data(data_to_sign, self.genesis_private_key) _input.data = P2PKH.create_input_data(public_bytes, signature) - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) self.manager.verification_service.verify(tx) def test_tx_weight_too_high(self): @@ -530,11 +525,11 @@ def test_tx_weight_too_high(self): inputs = [TxInput(b'', 0, b'')] tx = Transaction(weight=1, inputs=inputs, outputs=outputs, parents=parents, storage=self.tx_storage, timestamp=self.last_block.timestamp + 1) - tx.weight = daa.minimum_tx_weight(tx) + tx.weight = self.manager.daa.minimum_tx_weight(tx) tx.weight += self._settings.MAX_TX_WEIGHT_DIFF + 0.1 tx.update_hash() with self.assertRaises(WeightError): - tx.verify_weight() + self._verifiers.tx.verify_weight(tx) def test_weight_nan(self): # this should succeed @@ -600,7 +595,7 @@ def test_tx_duplicated_parents(self): public_bytes, signature = self.wallet.get_input_aux_data(data_to_sign, self.genesis_private_key) _input.data = P2PKH.create_input_data(public_bytes, signature) - tx.resolve() + 
self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(DuplicatedParents): self.manager.verification_service.verify(tx) @@ -629,8 +624,8 @@ def test_update_timestamp(self): self.assertEquals(tx.timestamp, ts) def test_propagation_error(self): - _set_test_mode(TestMode.DISABLED) manager = self.create_peer('testnet', unlock_wallet=True) + manager.daa.TEST_MODE = TestMode.DISABLED # 1. propagate genesis genesis_block = self.genesis_blocks[0] @@ -640,20 +635,20 @@ def test_propagation_error(self): # 2. propagate block with weight 1 block = manager.generate_mining_block() block.weight = 1 - block.resolve() + self.manager.cpu_mining_service.resolve(block) self.assertFalse(manager.propagate_tx(block)) # 3. propagate block with wrong amount of tokens block = manager.generate_mining_block() output = TxOutput(1, block.outputs[0].script) block.outputs = [output] - block.resolve() + self.manager.cpu_mining_service.resolve(block) self.assertFalse(manager.propagate_tx(block)) # 4. propagate block from the future block = manager.generate_mining_block() block.timestamp = int(self.clock.seconds()) + self._settings.MAX_FUTURE_TIMESTAMP_ALLOWED + 100 - block.resolve(update_time=False) + manager.cpu_mining_service.resolve(block, update_time=False) self.assertFalse(manager.propagate_tx(block)) def test_tx_methods(self): @@ -682,34 +677,34 @@ def test_tx_methods(self): self.assertFalse(tx_equal.is_genesis) # Pow error - tx2.verify_pow() + self._verifiers.vertex.verify_pow(tx2) tx2.weight = 100 with self.assertRaises(PowError): - tx2.verify_pow() + self._verifiers.vertex.verify_pow(tx2) # Verify parent timestamps - tx2.verify_parents() + self._verifiers.vertex.verify_parents(tx2) tx2_timestamp = tx2.timestamp tx2.timestamp = 2 with self.assertRaises(TimestampError): - tx2.verify_parents() + self._verifiers.vertex.verify_parents(tx2) tx2.timestamp = tx2_timestamp # Verify inputs timestamps - tx2.verify_inputs() + self._verifiers.tx.verify_inputs(tx2) tx2.timestamp = 2 with 
self.assertRaises(TimestampError): - tx2.verify_inputs() + self._verifiers.tx.verify_inputs(tx2) tx2.timestamp = tx2_timestamp # Validate maximum distance between blocks block = blocks[0] block2 = blocks[1] block2.timestamp = block.timestamp + self._settings.MAX_DISTANCE_BETWEEN_BLOCKS - block2.verify_parents() + self._verifiers.vertex.verify_parents(block2) block2.timestamp += 1 with self.assertRaises(TimestampError): - block2.verify_parents() + self._verifiers.vertex.verify_parents(block2) def test_block_big_nonce(self): block = self.genesis_blocks[0] @@ -718,7 +713,7 @@ def test_block_big_nonce(self): start = 1 << (8 * 12) end = start + 1 << (8*4) - hash = block.start_mining(start, end) + hash = self.manager.cpu_mining_service.start_mining(block, start=start, end=end) assert hash is not None block.hash = hash @@ -802,7 +797,7 @@ def test_output_value(self): _input = TxInput(random_bytes, 0, random_bytes) tx = Transaction(inputs=[_input], outputs=[output], parents=parents, storage=self.tx_storage) with self.assertRaises(InvalidOutputValue): - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) # 'Manually resolving', to validate verify method tx.hash = bytes.fromhex('012cba011be3c29f1c406f9015e42698b97169dbc6652d1f5e4d5c5e83138858') @@ -886,7 +881,8 @@ def _test_txout_script_limit(self, offset): _output = TxOutput(value, script) tx = Transaction(inputs=[_input], outputs=[_output], storage=self.tx_storage) - tx.verify_outputs() + self._verifiers.vertex.verify_outputs(tx) + self._verifiers.tx.verify_output_token_indexes(tx) def test_txout_script_limit_exceeded(self): with self.assertRaises(InvalidOutputScriptSize): @@ -910,7 +906,7 @@ def _test_txin_data_limit(self, offset): outputs=[_output], storage=self.tx_storage ) - tx.verify_inputs(skip_script=True) + self._verifiers.tx.verify_inputs(tx, skip_script=True) def test_txin_data_limit_exceeded(self): with self.assertRaises(InvalidInputDataSize): @@ -943,7 +939,7 @@ def test_wallet_index(self): public_bytes, 
signature = self.wallet.get_input_aux_data(data_to_sign, self.genesis_private_key) _input.data = P2PKH.create_input_data(public_bytes, signature) - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) self.manager.propagate_tx(tx) # This transaction has an output to address_b58, so we need one more element on the index @@ -967,7 +963,7 @@ def test_wallet_index(self): public_bytes, signature = self.wallet.get_input_aux_data(data_to_sign, self.genesis_private_key) input1.data = P2PKH.create_input_data(public_bytes, signature) - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) self.manager.propagate_tx(tx2) # tx2 has two outputs, for address_b58 and new_address_b58 @@ -990,7 +986,7 @@ def test_wallet_index(self): public_bytes, signature = self.wallet.get_input_aux_data(data_to_sign, self.genesis_private_key) input2.data = P2PKH.create_input_data(public_bytes, signature) - tx3.resolve() + self.manager.cpu_mining_service.resolve(tx3) self.manager.propagate_tx(tx3) # tx3 has one output, for another new address (output3_address_b58) and it's spending an output of address_b58 @@ -1063,7 +1059,7 @@ def test_sigops_output_single_below_limit(self) -> None: output3 = TxOutput(value, hscript) tx = Transaction(inputs=[_input], outputs=[output3], storage=self.tx_storage) tx.update_hash() - tx.verify_sigops_output() + self._verifiers.vertex.verify_sigops_output(tx) def test_sigops_output_multi_below_limit(self) -> None: genesis_block = self.genesis_blocks[0] @@ -1075,7 +1071,7 @@ def test_sigops_output_multi_below_limit(self) -> None: output4 = TxOutput(value, hscript) tx = Transaction(inputs=[_input], outputs=[output4]*num_outputs, storage=self.tx_storage) tx.update_hash() - tx.verify_sigops_output() + self._verifiers.vertex.verify_sigops_output(tx) def test_sigops_input_single_above_limit(self) -> None: genesis_block = self.genesis_blocks[0] @@ -1117,7 +1113,7 @@ def test_sigops_input_single_below_limit(self) -> None: input3 = TxInput(genesis_block.hash, 0, 
hscript) tx = Transaction(inputs=[input3], outputs=[_output], storage=self.tx_storage) tx.update_hash() - tx.verify_sigops_input() + self._verifiers.tx.verify_sigops_input(tx) def test_sigops_input_multi_below_limit(self) -> None: genesis_block = self.genesis_blocks[0] @@ -1131,7 +1127,7 @@ def test_sigops_input_multi_below_limit(self) -> None: input4 = TxInput(genesis_block.hash, 0, hscript) tx = Transaction(inputs=[input4]*num_inputs, outputs=[_output], storage=self.tx_storage) tx.update_hash() - tx.verify_sigops_input() + self._verifiers.tx.verify_sigops_input(tx) def test_compare_bytes_equal(self) -> None: # create some block diff --git a/tests/tx/test_tx_deserialization.py b/tests/tx/test_tx_deserialization.py index 7e15598f3..4e878c802 100644 --- a/tests/tx/test_tx_deserialization.py +++ b/tests/tx/test_tx_deserialization.py @@ -1,10 +1,19 @@ +from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.transaction import Block, MergeMinedBlock, Transaction, TxVersion from hathor.transaction.token_creation_tx import TokenCreationTransaction +from hathor.verification.verification_service import VerificationService +from hathor.verification.vertex_verifiers import VertexVerifiers from tests import unittest class _BaseTest: class _DeserializationTest(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + daa = DifficultyAdjustmentAlgorithm(settings=self._settings) + verifiers = VertexVerifiers.create_defaults(settings=self._settings, daa=daa) + self._verification_service = VerificationService(verifiers=verifiers) + def test_deserialize(self): cls = self.get_tx_class() tx = cls.create_from_struct(self.tx_bytes) @@ -18,7 +27,7 @@ def verbose(key, value): cls = self.get_tx_class() tx = cls.create_from_struct(self.tx_bytes, verbose=verbose) - tx.verify_without_storage() + self._verification_service.verify_without_storage(tx) key, version = v[1] self.assertEqual(key, 'version') diff --git a/tests/tx/test_tx_serialization.py 
b/tests/tx/test_tx_serialization.py index 0c72ae0f6..c32986025 100644 --- a/tests/tx/test_tx_serialization.py +++ b/tests/tx/test_tx_serialization.py @@ -1,8 +1,9 @@ from hathor.crypto.util import decode_address +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction from hathor.wallet.base_wallet import WalletOutputInfo from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_blocks +from tests.utils import add_blocks_unlock_reward class _SerializationTest(unittest.TestCase): @@ -30,7 +31,7 @@ def setUp(self): self.tx1.weight = 10 self.tx1.parents = self.manager.get_new_tx_parents() self.tx1.timestamp = int(self.clock.seconds()) - self.tx1.resolve() + self.manager.cpu_mining_service.resolve(self.tx1) self.manager.propagate_tx(self.tx1) # Change of parents only, so it's a twin. @@ -38,7 +39,7 @@ def setUp(self): self.tx2 = Transaction.create_from_struct(self.tx1.get_struct()) self.tx2.parents = [self.tx1.parents[1], self.tx1.parents[0]] self.tx2.weight = 9 - self.tx2.resolve() + self.manager.cpu_mining_service.resolve(self.tx2) # Propagate a conflicting twin transaction self.manager.propagate_tx(self.tx2) diff --git a/tests/tx/test_tx_storage.py b/tests/tx/test_tx_storage.py index 909164169..7d25f97bd 100644 --- a/tests/tx/test_tx_storage.py +++ b/tests/tx/test_tx_storage.py @@ -9,7 +9,8 @@ from twisted.trial import unittest from hathor.conf import HathorSettings -from hathor.daa import TestMode, _set_test_mode +from hathor.daa import TestMode +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Block, Transaction, TxInput, TxOutput from hathor.transaction.scripts import P2PKH from hathor.transaction.storage.exceptions import TransactionDoesNotExist @@ -19,7 +20,6 @@ BURN_ADDRESS, HAS_ROCKSDB, add_blocks_unlock_reward, - add_new_blocks, add_new_transactions, add_new_tx, create_tokens, @@ -63,7 +63,7 @@ def setUp(self): previous_timestamp = 
artifacts.settings.GENESIS_TX2_TIMESTAMP self.block = Block(timestamp=previous_timestamp + 1, weight=12, outputs=[output], parents=block_parents, nonce=100781, storage=self.tx_storage) - self.block.resolve() + self.manager.cpu_mining_service.resolve(self.block) self.manager.verification_service.verify(self.block) self.block.get_metadata().validation = ValidationState.FULL @@ -80,7 +80,7 @@ def setUp(self): timestamp=previous_timestamp + 2, weight=10, nonce=932049, inputs=[tx_input], outputs=[output], tokens=[bytes.fromhex('0023be91834c973d6a6ddd1a0ae411807b7c8ef2a015afb5177ee64b666ce602')], parents=tx_parents, storage=self.tx_storage) - self.tx.resolve() + self.manager.cpu_mining_service.resolve(self.tx) self.tx.get_metadata().validation = ValidationState.FULL # Disable weakref to test the internal methods. Otherwise, most methods return objects from weakref. @@ -145,7 +145,7 @@ def test_storage_basic_v2(self): self.assertEqual(set(tx_parents_hash), {self.genesis_txs[0].hash, self.genesis_txs[1].hash}) def test_vertices_count(self): - _set_test_mode(TestMode.TEST_ALL_WEIGHT) + self.manager.daa.TEST_MODE = TestMode.TEST_ALL_WEIGHT blocks_count = 1 txs_count = 2 @@ -500,12 +500,12 @@ def test_token_list(self): self.validate_save(tx) # 2 token uids tx.tokens.append(bytes.fromhex('00001c5c0b69d13b05534c94a69b2c8272294e6b0c536660a3ac264820677024')) - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) tx._metadata.hash = tx.hash self.validate_save(tx) # no tokens tx.tokens = [] - tx.resolve() + self.manager.cpu_mining_service.resolve(tx) tx._metadata.hash = tx.hash self.validate_save(tx) @@ -515,14 +515,14 @@ def _add_new_block(self, parents=None): if parents is not None: block.parents = parents block.weight = 10 - self.assertTrue(block.resolve()) + self.assertTrue(self.manager.cpu_mining_service.resolve(block)) self.manager.verification_service.verify(block) self.manager.propagate_tx(block, fails_silently=False) self.reactor.advance(5) return block def 
test_best_block_tips_cache(self): - _set_test_mode(TestMode.TEST_ALL_WEIGHT) + self.manager.daa.TEST_MODE = TestMode.TEST_ALL_WEIGHT self.manager.wallet.unlock(b'MYPASS') spent_blocks = add_new_blocks(self.manager, 10) self.assertEqual(self.tx_storage._best_block_tips_cache, [spent_blocks[-1].hash]) @@ -534,7 +534,7 @@ def test_best_block_tips_cache(self): self.assertEqual(self.tx_storage._best_block_tips_cache, [latest_blocks[-1].hash]) def test_topological_sort(self): - _set_test_mode(TestMode.TEST_ALL_WEIGHT) + self.manager.daa.TEST_MODE = TestMode.TEST_ALL_WEIGHT _total = 0 blocks = add_new_blocks(self.manager, 1, advance_clock=1) _total += len(blocks) diff --git a/tests/tx/test_verification.py b/tests/tx/test_verification.py new file mode 100644 index 000000000..336d54510 --- /dev/null +++ b/tests/tx/test_verification.py @@ -0,0 +1,1055 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from unittest.mock import Mock, patch + +from hathor.crypto.util import get_address_from_public_key +from hathor.manager import HathorManager +from hathor.transaction import BitcoinAuxPow, Block, MergeMinedBlock, Transaction, TxInput, TxOutput +from hathor.transaction.scripts import P2PKH +from hathor.transaction.token_creation_tx import TokenCreationTransaction +from hathor.transaction.validation_state import ValidationState +from hathor.verification.block_verifier import BlockVerifier +from hathor.verification.merge_mined_block_verifier import MergeMinedBlockVerifier +from hathor.verification.token_creation_transaction_verifier import TokenCreationTransactionVerifier +from hathor.verification.transaction_verifier import TransactionVerifier +from hathor.verification.vertex_verifier import VertexVerifier +from tests import unittest +from tests.utils import add_blocks_unlock_reward, create_tokens, get_genesis_key + + +class BaseVerificationTest(unittest.TestCase): + """ + This module implements simple tests related to vertex verification. It does not test the implementation of + verification methods, but rather simply asserts that each verification method is called when it is supposed to be + called. This guarantee is mostly useful during the verification refactors. 
+ """ + __test__ = False + + def setUp(self) -> None: + super().setUp() + self.manager: HathorManager = self.create_peer('network') + self.verifiers = self.manager.verification_service.verifiers + + def _get_valid_block(self) -> Block: + return Block( + hash=b'some_hash', + storage=self.manager.tx_storage, + weight=1, + outputs=[TxOutput(value=6400, script=b'')], + parents=[ + self._settings.GENESIS_BLOCK_HASH, + self._settings.GENESIS_TX1_HASH, + self._settings.GENESIS_TX2_HASH + ] + ) + + def _get_valid_merge_mined_block(self) -> MergeMinedBlock: + return MergeMinedBlock( + hash=b'some_hash', + storage=self.manager.tx_storage, + weight=1, + outputs=[TxOutput(value=6400, script=b'')], + aux_pow=BitcoinAuxPow.dummy(), + parents=[ + self._settings.GENESIS_BLOCK_HASH, + self._settings.GENESIS_TX1_HASH, + self._settings.GENESIS_TX2_HASH + ], + ) + + def _get_valid_tx(self) -> Transaction: + genesis_private_key = get_genesis_key() + genesis_public_key = genesis_private_key.public_key() + genesis_block = self.manager.tx_storage.get_transaction(self._settings.GENESIS_BLOCK_HASH) + + utxo = genesis_block.outputs[0] + address = get_address_from_public_key(genesis_public_key) + script = P2PKH.create_output_script(address) + output = TxOutput(utxo.value, script) + _input = TxInput(self._settings.GENESIS_BLOCK_HASH, 0, b'') + + tx = Transaction( + hash=b'some_hash', + storage=self.manager.tx_storage, + weight=1, + inputs=[_input], + outputs=[output], + parents=[ + self._settings.GENESIS_TX1_HASH, + self._settings.GENESIS_TX2_HASH, + ] + ) + + data_to_sign = tx.get_sighash_all() + assert self.manager.wallet + public_bytes, signature = self.manager.wallet.get_input_aux_data(data_to_sign, genesis_private_key) + _input.data = P2PKH.create_input_data(public_bytes, signature) + + return tx + + def _get_valid_token_creation_tx(self) -> TokenCreationTransaction: + add_blocks_unlock_reward(self.manager) + assert self.manager.wallet + return create_tokens(self.manager, 
self.manager.wallet.get_unused_address()) + + def test_block_verify_basic(self) -> None: + block = self._get_valid_block() + + verify_weight_wrapped = Mock(wraps=self.verifiers.block.verify_weight) + verify_reward_wrapped = Mock(wraps=self.verifiers.block.verify_reward) + + with ( + patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), + patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), + ): + self.manager.verification_service.verify_basic(block) + + # Block methods + verify_weight_wrapped.assert_called_once() + verify_reward_wrapped.assert_called_once() + + def test_block_verify_without_storage(self) -> None: + block = self._get_valid_block() + + verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + + verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_no_inputs_wrapped = Mock(wraps=self.verifiers.block.verify_no_inputs) + verify_output_token_indexes_wrapped = Mock(wraps=self.verifiers.block.verify_output_token_indexes) + verify_number_of_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_data_wrapped = Mock(wraps=self.verifiers.block.verify_data) + verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + + with ( + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), + patch.object(BlockVerifier, 'verify_no_inputs', verify_no_inputs_wrapped), + patch.object(BlockVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), + patch.object(BlockVerifier, 'verify_data', verify_data_wrapped), + patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), + ): + self.manager.verification_service.verify_without_storage(block) + + # Vertex methods + verify_outputs_wrapped.assert_called_once() + + # Block methods 
+ verify_pow_wrapped.assert_called_once() + verify_no_inputs_wrapped.assert_called_once() + verify_output_token_indexes_wrapped.assert_called_once() + verify_number_of_outputs_wrapped.assert_called_once() + verify_data_wrapped.assert_called_once() + verify_sigops_output_wrapped.assert_called_once() + + def test_block_verify(self) -> None: + block = self._get_valid_block() + + verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + + verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_no_inputs_wrapped = Mock(wraps=self.verifiers.block.verify_no_inputs) + verify_output_token_indexes_wrapped = Mock(wraps=self.verifiers.block.verify_output_token_indexes) + verify_number_of_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_data_wrapped = Mock(wraps=self.verifiers.block.verify_data) + verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + verify_parents_wrapped = Mock(wraps=self.verifiers.vertex.verify_parents) + verify_height_wrapped = Mock(wraps=self.verifiers.block.verify_height) + verify_mandatory_signaling_wrapped = Mock(wraps=self.verifiers.block.verify_mandatory_signaling) + + with ( + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), + patch.object(BlockVerifier, 'verify_no_inputs', verify_no_inputs_wrapped), + patch.object(BlockVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), + patch.object(BlockVerifier, 'verify_data', verify_data_wrapped), + patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), + patch.object(VertexVerifier, 'verify_parents', verify_parents_wrapped), + patch.object(BlockVerifier, 'verify_height', verify_height_wrapped), + patch.object(BlockVerifier, 'verify_mandatory_signaling', 
verify_mandatory_signaling_wrapped), + ): + self.manager.verification_service.verify(block) + + # Vertex methods + verify_outputs_wrapped.assert_called_once() + + # Block methods + verify_pow_wrapped.assert_called_once() + verify_no_inputs_wrapped.assert_called_once() + verify_output_token_indexes_wrapped.assert_called_once() + verify_number_of_outputs_wrapped.assert_called_once() + verify_data_wrapped.assert_called_once() + verify_sigops_output_wrapped.assert_called_once() + verify_parents_wrapped.assert_called_once() + verify_height_wrapped.assert_called_once() + verify_mandatory_signaling_wrapped.assert_called_once() + + def test_block_validate_basic(self) -> None: + block = self._get_valid_block() + + verify_weight_wrapped = Mock(wraps=self.verifiers.block.verify_weight) + verify_reward_wrapped = Mock(wraps=self.verifiers.block.verify_reward) + + with ( + patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), + patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), + ): + self.manager.verification_service.validate_basic(block) + + # Block methods + verify_weight_wrapped.assert_called_once() + verify_reward_wrapped.assert_called_once() + + # validation should be BASIC + self.assertEqual(block.get_metadata().validation, ValidationState.BASIC) + + # full validation should still pass and the validation updated to FULL + self.manager.verification_service.validate_full(block) + self.assertEqual(block.get_metadata().validation, ValidationState.FULL) + + # and if running basic validation again it shouldn't validate or change the validation state + verify_weight_wrapped2 = Mock(wraps=self.verifiers.block.verify_weight) + verify_reward_wrapped2 = Mock(wraps=self.verifiers.block.verify_reward) + + with ( + patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped2), + patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped2), + ): + self.manager.verification_service.validate_basic(block) + + # Block methods + 
verify_weight_wrapped2.assert_not_called() + verify_reward_wrapped2.assert_not_called() + + # validation should still be FULL, it must not be BASIC + self.assertEqual(block.get_metadata().validation, ValidationState.FULL) + + def test_block_validate_full(self) -> None: + block = self._get_valid_block() + + verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + + verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_no_inputs_wrapped = Mock(wraps=self.verifiers.block.verify_no_inputs) + verify_output_token_indexes_wrapped = Mock(wraps=self.verifiers.block.verify_output_token_indexes) + verify_number_of_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_data_wrapped = Mock(wraps=self.verifiers.block.verify_data) + verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + verify_parents_wrapped = Mock(wraps=self.verifiers.vertex.verify_parents) + verify_height_wrapped = Mock(wraps=self.verifiers.block.verify_height) + verify_weight_wrapped = Mock(wraps=self.verifiers.block.verify_weight) + verify_reward_wrapped = Mock(wraps=self.verifiers.block.verify_reward) + verify_mandatory_signaling_wrapped = Mock(wraps=self.verifiers.block.verify_mandatory_signaling) + + with ( + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), + patch.object(BlockVerifier, 'verify_no_inputs', verify_no_inputs_wrapped), + patch.object(BlockVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), + patch.object(BlockVerifier, 'verify_data', verify_data_wrapped), + patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), + patch.object(VertexVerifier, 'verify_parents', verify_parents_wrapped), + patch.object(BlockVerifier, 'verify_height', verify_height_wrapped), + 
patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), + patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), + patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), + ): + self.manager.verification_service.validate_full(block) + + # Vertex methods + verify_outputs_wrapped.assert_called_once() + + # Block methods + verify_pow_wrapped.assert_called_once() + verify_no_inputs_wrapped.assert_called_once() + verify_output_token_indexes_wrapped.assert_called_once() + verify_number_of_outputs_wrapped.assert_called_once() + verify_data_wrapped.assert_called_once() + verify_sigops_output_wrapped.assert_called_once() + verify_parents_wrapped.assert_called_once() + verify_height_wrapped.assert_called_once() + verify_weight_wrapped.assert_called_once() + verify_reward_wrapped.assert_called_once() + verify_mandatory_signaling_wrapped.assert_called_once() + + def test_merge_mined_block_verify_basic(self) -> None: + block = self._get_valid_merge_mined_block() + + verify_weight_wrapped = Mock(wraps=self.verifiers.block.verify_weight) + verify_reward_wrapped = Mock(wraps=self.verifiers.block.verify_reward) + + with ( + patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), + patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), + ): + self.manager.verification_service.verify_basic(block) + + # Block methods + verify_weight_wrapped.assert_called_once() + verify_reward_wrapped.assert_called_once() + + def test_merge_mined_block_verify_without_storage(self) -> None: + block = self._get_valid_merge_mined_block() + + verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + + verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_no_inputs_wrapped = Mock(wraps=self.verifiers.block.verify_no_inputs) + verify_output_token_indexes_wrapped = Mock(wraps=self.verifiers.block.verify_output_token_indexes) + verify_number_of_outputs_wrapped = 
Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_data_wrapped = Mock(wraps=self.verifiers.block.verify_data) + verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + + verify_aux_pow_wrapped = Mock(wraps=self.verifiers.merge_mined_block.verify_aux_pow) + + with ( + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), + patch.object(BlockVerifier, 'verify_no_inputs', verify_no_inputs_wrapped), + patch.object(BlockVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), + patch.object(BlockVerifier, 'verify_data', verify_data_wrapped), + patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), + patch.object(MergeMinedBlockVerifier, 'verify_aux_pow', verify_aux_pow_wrapped), + ): + self.manager.verification_service.verify_without_storage(block) + + # Vertex methods + verify_outputs_wrapped.assert_called_once() + + # Block methods + verify_pow_wrapped.assert_called_once() + verify_no_inputs_wrapped.assert_called_once() + verify_output_token_indexes_wrapped.assert_called_once() + verify_number_of_outputs_wrapped.assert_called_once() + verify_data_wrapped.assert_called_once() + verify_sigops_output_wrapped.assert_called_once() + + # MergeMinedBlock methods + verify_pow_wrapped.assert_called_once() + + def test_merge_mined_block_verify(self) -> None: + block = self._get_valid_merge_mined_block() + + verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + + verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_no_inputs_wrapped = Mock(wraps=self.verifiers.block.verify_no_inputs) + verify_output_token_indexes_wrapped = Mock(wraps=self.verifiers.block.verify_output_token_indexes) + verify_number_of_outputs_wrapped = 
Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_data_wrapped = Mock(wraps=self.verifiers.block.verify_data) + verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + verify_parents_wrapped = Mock(wraps=self.verifiers.vertex.verify_parents) + verify_height_wrapped = Mock(wraps=self.verifiers.block.verify_height) + verify_mandatory_signaling_wrapped = Mock(wraps=self.verifiers.block.verify_mandatory_signaling) + + verify_aux_pow_wrapped = Mock(wraps=self.verifiers.merge_mined_block.verify_aux_pow) + + with ( + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), + patch.object(BlockVerifier, 'verify_no_inputs', verify_no_inputs_wrapped), + patch.object(BlockVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), + patch.object(BlockVerifier, 'verify_data', verify_data_wrapped), + patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), + patch.object(VertexVerifier, 'verify_parents', verify_parents_wrapped), + patch.object(BlockVerifier, 'verify_height', verify_height_wrapped), + patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), + patch.object(MergeMinedBlockVerifier, 'verify_aux_pow', verify_aux_pow_wrapped), + ): + self.manager.verification_service.verify(block) + + # Vertex methods + verify_outputs_wrapped.assert_called_once() + + # Block methods + verify_pow_wrapped.assert_called_once() + verify_no_inputs_wrapped.assert_called_once() + verify_output_token_indexes_wrapped.assert_called_once() + verify_number_of_outputs_wrapped.assert_called_once() + verify_data_wrapped.assert_called_once() + verify_sigops_output_wrapped.assert_called_once() + verify_parents_wrapped.assert_called_once() + verify_height_wrapped.assert_called_once() + 
verify_mandatory_signaling_wrapped.assert_called_once() + + # MergeMinedBlock methods + verify_pow_wrapped.assert_called_once() + + def test_merge_mined_block_validate_basic(self) -> None: + block = self._get_valid_merge_mined_block() + + verify_weight_wrapped = Mock(wraps=self.verifiers.block.verify_weight) + verify_reward_wrapped = Mock(wraps=self.verifiers.block.verify_reward) + + with ( + patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), + patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), + ): + self.manager.verification_service.validate_basic(block) + + # Block methods + verify_weight_wrapped.assert_called_once() + verify_reward_wrapped.assert_called_once() + + # validation should be BASIC + self.assertEqual(block.get_metadata().validation, ValidationState.BASIC) + + # full validation should still pass and the validation updated to FULL + self.manager.verification_service.validate_full(block) + self.assertEqual(block.get_metadata().validation, ValidationState.FULL) + + # and if running basic validation again it shouldn't validate or change the validation state + verify_weight_wrapped2 = Mock(wraps=self.verifiers.block.verify_weight) + verify_reward_wrapped2 = Mock(wraps=self.verifiers.block.verify_reward) + + with ( + patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped2), + patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped2), + ): + self.manager.verification_service.validate_basic(block) + + # Block methods + verify_weight_wrapped2.assert_not_called() + verify_reward_wrapped2.assert_not_called() + + # validation should still be FULL, it must not be BASIC + self.assertEqual(block.get_metadata().validation, ValidationState.FULL) + + def test_merge_mined_block_validate_full(self) -> None: + block = self._get_valid_merge_mined_block() + + verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + + verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) + 
verify_no_inputs_wrapped = Mock(wraps=self.verifiers.block.verify_no_inputs) + verify_output_token_indexes_wrapped = Mock(wraps=self.verifiers.block.verify_output_token_indexes) + verify_number_of_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_data_wrapped = Mock(wraps=self.verifiers.block.verify_data) + verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + verify_parents_wrapped = Mock(wraps=self.verifiers.vertex.verify_parents) + verify_height_wrapped = Mock(wraps=self.verifiers.block.verify_height) + verify_weight_wrapped = Mock(wraps=self.verifiers.block.verify_weight) + verify_reward_wrapped = Mock(wraps=self.verifiers.block.verify_reward) + verify_mandatory_signaling_wrapped = Mock(wraps=self.verifiers.block.verify_mandatory_signaling) + + verify_aux_pow_wrapped = Mock(wraps=self.verifiers.merge_mined_block.verify_aux_pow) + + with ( + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), + patch.object(BlockVerifier, 'verify_no_inputs', verify_no_inputs_wrapped), + patch.object(BlockVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), + patch.object(BlockVerifier, 'verify_data', verify_data_wrapped), + patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), + patch.object(VertexVerifier, 'verify_parents', verify_parents_wrapped), + patch.object(BlockVerifier, 'verify_height', verify_height_wrapped), + patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), + patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), + patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), + patch.object(MergeMinedBlockVerifier, 'verify_aux_pow', verify_aux_pow_wrapped), + ): + 
self.manager.verification_service.validate_full(block) + + # Vertex methods + verify_outputs_wrapped.assert_called_once() + + # Block methods + verify_pow_wrapped.assert_called_once() + verify_no_inputs_wrapped.assert_called_once() + verify_output_token_indexes_wrapped.assert_called_once() + verify_number_of_outputs_wrapped.assert_called_once() + verify_data_wrapped.assert_called_once() + verify_sigops_output_wrapped.assert_called_once() + verify_parents_wrapped.assert_called_once() + verify_height_wrapped.assert_called_once() + verify_weight_wrapped.assert_called_once() + verify_reward_wrapped.assert_called_once() + verify_mandatory_signaling_wrapped.assert_called_once() + + # MergeMinedBlock methods + verify_pow_wrapped.assert_called_once() + + def test_transaction_verify_basic(self) -> None: + tx = self._get_valid_tx() + + verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + + verify_parents_basic_wrapped = Mock(wraps=self.verifiers.tx.verify_parents_basic) + verify_weight_wrapped = Mock(wraps=self.verifiers.tx.verify_weight) + verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_number_of_inputs_wrapped = Mock(wraps=self.verifiers.tx.verify_number_of_inputs) + verify_output_token_indexes_wrapped = Mock(wraps=self.verifiers.tx.verify_output_token_indexes) + verify_number_of_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + + with ( + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(TransactionVerifier, 'verify_parents_basic', verify_parents_basic_wrapped), + patch.object(TransactionVerifier, 'verify_weight', verify_weight_wrapped), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), + patch.object(TransactionVerifier, 'verify_number_of_inputs', verify_number_of_inputs_wrapped), + patch.object(TransactionVerifier, 'verify_output_token_indexes', 
verify_output_token_indexes_wrapped), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), + patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), + ): + self.manager.verification_service.verify_basic(tx) + + # Vertex methods + verify_outputs_wrapped.assert_called_once() + + # Transaction methods + verify_parents_basic_wrapped.assert_called_once() + verify_weight_wrapped.assert_called_once() + verify_pow_wrapped.assert_called_once() + verify_number_of_inputs_wrapped.assert_called_once() + verify_output_token_indexes_wrapped.assert_called_once() + verify_number_of_outputs_wrapped.assert_called_once() + verify_sigops_output_wrapped.assert_called_once() + + def test_transaction_verify_without_storage(self) -> None: + tx = self._get_valid_tx() + + verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + + verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_number_of_inputs_wrapped = Mock(wraps=self.verifiers.tx.verify_number_of_inputs) + verify_output_token_indexes_wrapped = Mock(wraps=self.verifiers.tx.verify_output_token_indexes) + verify_number_of_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + + with ( + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), + patch.object(TransactionVerifier, 'verify_number_of_inputs', verify_number_of_inputs_wrapped), + patch.object(TransactionVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), + patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), + ): + self.manager.verification_service.verify_without_storage(tx) + + # Vertex methods + 
verify_outputs_wrapped.assert_called_once() + + # Transaction methods + verify_pow_wrapped.assert_called_once() + verify_number_of_inputs_wrapped.assert_called_once() + verify_output_token_indexes_wrapped.assert_called_once() + verify_number_of_outputs_wrapped.assert_called_once() + verify_sigops_output_wrapped.assert_called_once() + + def test_transaction_verify(self) -> None: + add_blocks_unlock_reward(self.manager) + tx = self._get_valid_tx() + + verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + + verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_number_of_inputs_wrapped = Mock(wraps=self.verifiers.tx.verify_number_of_inputs) + verify_output_token_indexes_wrapped = Mock(wraps=self.verifiers.tx.verify_output_token_indexes) + verify_number_of_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + verify_sigops_input_wrapped = Mock(wraps=self.verifiers.tx.verify_sigops_input) + verify_inputs_wrapped = Mock(wraps=self.verifiers.tx.verify_inputs) + verify_script_wrapped = Mock(wraps=self.verifiers.tx.verify_script) + verify_parents_wrapped = Mock(wraps=self.verifiers.vertex.verify_parents) + verify_sum_wrapped = Mock(wraps=self.verifiers.tx.verify_sum) + verify_reward_locked_wrapped = Mock(wraps=self.verifiers.tx.verify_reward_locked) + + with ( + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), + patch.object(TransactionVerifier, 'verify_number_of_inputs', verify_number_of_inputs_wrapped), + patch.object(TransactionVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), + patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), + patch.object(TransactionVerifier, 
'verify_sigops_input', verify_sigops_input_wrapped), + patch.object(TransactionVerifier, 'verify_inputs', verify_inputs_wrapped), + patch.object(TransactionVerifier, 'verify_script', verify_script_wrapped), + patch.object(VertexVerifier, 'verify_parents', verify_parents_wrapped), + patch.object(TransactionVerifier, 'verify_sum', verify_sum_wrapped), + patch.object(TransactionVerifier, 'verify_reward_locked', verify_reward_locked_wrapped), + ): + self.manager.verification_service.verify(tx) + + # Vertex methods + verify_outputs_wrapped.assert_called_once() + + # Transaction methods + verify_pow_wrapped.assert_called_once() + verify_number_of_inputs_wrapped.assert_called_once() + verify_output_token_indexes_wrapped.assert_called_once() + verify_number_of_outputs_wrapped.assert_called_once() + verify_sigops_output_wrapped.assert_called_once() + verify_sigops_input_wrapped.assert_called_once() + verify_inputs_wrapped.assert_called_once() + verify_script_wrapped.assert_called_once() + verify_parents_wrapped.assert_called_once() + verify_sum_wrapped.assert_called_once() + verify_reward_locked_wrapped.assert_called_once() + + def test_transaction_validate_basic(self) -> None: + # add enough blocks so that it can be fully validated later on the tests + add_blocks_unlock_reward(self.manager) + tx = self._get_valid_tx() + + verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + + verify_parents_basic_wrapped = Mock(wraps=self.verifiers.tx.verify_parents_basic) + verify_weight_wrapped = Mock(wraps=self.verifiers.tx.verify_weight) + verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_number_of_inputs_wrapped = Mock(wraps=self.verifiers.tx.verify_number_of_inputs) + verify_output_token_indexes_wrapped = Mock(wraps=self.verifiers.tx.verify_output_token_indexes) + verify_number_of_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_sigops_output_wrapped = 
Mock(wraps=self.verifiers.vertex.verify_sigops_output) + + with ( + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(TransactionVerifier, 'verify_parents_basic', verify_parents_basic_wrapped), + patch.object(TransactionVerifier, 'verify_weight', verify_weight_wrapped), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), + patch.object(TransactionVerifier, 'verify_number_of_inputs', verify_number_of_inputs_wrapped), + patch.object(TransactionVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), + patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), + ): + self.manager.verification_service.validate_basic(tx) + + # Vertex methods + verify_outputs_wrapped.assert_called_once() + + # Transaction methods + verify_parents_basic_wrapped.assert_called_once() + verify_weight_wrapped.assert_called_once() + verify_pow_wrapped.assert_called_once() + verify_number_of_inputs_wrapped.assert_called_once() + verify_output_token_indexes_wrapped.assert_called_once() + verify_number_of_outputs_wrapped.assert_called_once() + verify_sigops_output_wrapped.assert_called_once() + + # validation should be BASIC + self.assertEqual(tx.get_metadata().validation, ValidationState.BASIC) + + # full validation should still pass and the validation updated to FULL + self.manager.verification_service.validate_full(tx) + self.assertEqual(tx.get_metadata().validation, ValidationState.FULL) + + # and if running basic validation again it shouldn't validate or change the validation state + verify_parents_basic_wrapped2 = Mock(wraps=self.verifiers.tx.verify_parents_basic) + verify_weight_wrapped2 = Mock(wraps=self.verifiers.tx.verify_weight) + verify_pow_wrapped2 = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_number_of_inputs_wrapped2 = Mock(wraps=self.verifiers.tx.verify_number_of_inputs) + 
verify_outputs_wrapped2 = Mock(wraps=self.verifiers.vertex.verify_outputs) + verify_number_of_outputs_wrapped2 = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_sigops_output_wrapped2 = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + + with ( + patch.object(TransactionVerifier, 'verify_parents_basic', verify_parents_basic_wrapped2), + patch.object(TransactionVerifier, 'verify_weight', verify_weight_wrapped2), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped2), + patch.object(TransactionVerifier, 'verify_number_of_inputs', verify_number_of_inputs_wrapped2), + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped2), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped2), + patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped2), + ): + self.manager.verification_service.validate_basic(tx) + + # Transaction methods + verify_parents_basic_wrapped2.assert_not_called() + verify_weight_wrapped2.assert_not_called() + verify_pow_wrapped2.assert_not_called() + verify_number_of_inputs_wrapped2.assert_not_called() + verify_outputs_wrapped2.assert_not_called() + verify_number_of_outputs_wrapped2.assert_not_called() + verify_sigops_output_wrapped2.assert_not_called() + + # validation should still be FULL, it must not be BASIC + self.assertEqual(tx.get_metadata().validation, ValidationState.FULL) + + def test_transaction_validate_full(self) -> None: + add_blocks_unlock_reward(self.manager) + tx = self._get_valid_tx() + + verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + + verify_parents_basic_wrapped = Mock(wraps=self.verifiers.tx.verify_parents_basic) + verify_weight_wrapped = Mock(wraps=self.verifiers.tx.verify_weight) + verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_number_of_inputs_wrapped = Mock(wraps=self.verifiers.tx.verify_number_of_inputs) + verify_output_token_indexes_wrapped = 
Mock(wraps=self.verifiers.tx.verify_output_token_indexes) + verify_number_of_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + verify_sigops_input_wrapped = Mock(wraps=self.verifiers.tx.verify_sigops_input) + verify_inputs_wrapped = Mock(wraps=self.verifiers.tx.verify_inputs) + verify_script_wrapped = Mock(wraps=self.verifiers.tx.verify_script) + verify_parents_wrapped = Mock(wraps=self.verifiers.vertex.verify_parents) + verify_sum_wrapped = Mock(wraps=self.verifiers.tx.verify_sum) + verify_reward_locked_wrapped = Mock(wraps=self.verifiers.tx.verify_reward_locked) + + with ( + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(TransactionVerifier, 'verify_parents_basic', verify_parents_basic_wrapped), + patch.object(TransactionVerifier, 'verify_weight', verify_weight_wrapped), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), + patch.object(TransactionVerifier, 'verify_number_of_inputs', verify_number_of_inputs_wrapped), + patch.object(TransactionVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), + patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), + patch.object(TransactionVerifier, 'verify_sigops_input', verify_sigops_input_wrapped), + patch.object(TransactionVerifier, 'verify_inputs', verify_inputs_wrapped), + patch.object(TransactionVerifier, 'verify_script', verify_script_wrapped), + patch.object(VertexVerifier, 'verify_parents', verify_parents_wrapped), + patch.object(TransactionVerifier, 'verify_sum', verify_sum_wrapped), + patch.object(TransactionVerifier, 'verify_reward_locked', verify_reward_locked_wrapped), + ): + self.manager.verification_service.validate_full(tx) + + # Vertex methods + assert verify_outputs_wrapped.call_count == 2 + + 
# Transaction methods + verify_parents_basic_wrapped.assert_called_once() + verify_weight_wrapped.assert_called_once() + assert verify_pow_wrapped.call_count == 2 + assert verify_number_of_inputs_wrapped.call_count == 2 + assert verify_output_token_indexes_wrapped.call_count == 2 + assert verify_number_of_outputs_wrapped.call_count == 2 + assert verify_sigops_output_wrapped.call_count == 2 + verify_sigops_input_wrapped.assert_called_once() + verify_inputs_wrapped.assert_called_once() + verify_script_wrapped.assert_called_once() + verify_parents_wrapped.assert_called_once() + verify_sum_wrapped.assert_called_once() + verify_reward_locked_wrapped.assert_called_once() + + # validation should be FULL + self.assertEqual(tx.get_metadata().validation, ValidationState.FULL) + + # and if running full validation again it shouldn't validate or change the validation state + verify_parents_basic_wrapped2 = Mock(wraps=self.verifiers.tx.verify_parents_basic) + verify_weight_wrapped2 = Mock(wraps=self.verifiers.tx.verify_weight) + verify_pow_wrapped2 = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_number_of_inputs_wrapped2 = Mock(wraps=self.verifiers.tx.verify_number_of_inputs) + verify_outputs_wrapped2 = Mock(wraps=self.verifiers.vertex.verify_outputs) + verify_number_of_outputs_wrapped2 = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_sigops_output_wrapped2 = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + + with ( + patch.object(TransactionVerifier, 'verify_parents_basic', verify_parents_basic_wrapped2), + patch.object(TransactionVerifier, 'verify_weight', verify_weight_wrapped2), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped2), + patch.object(TransactionVerifier, 'verify_number_of_inputs', verify_number_of_inputs_wrapped2), + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped2), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped2), + patch.object(VertexVerifier, 
'verify_sigops_output', verify_sigops_output_wrapped2), + ): + self.manager.verification_service.validate_basic(tx) + + # Transaction methods + verify_parents_basic_wrapped2.assert_not_called() + verify_weight_wrapped2.assert_not_called() + verify_pow_wrapped2.assert_not_called() + verify_number_of_inputs_wrapped2.assert_not_called() + verify_outputs_wrapped2.assert_not_called() + verify_number_of_outputs_wrapped2.assert_not_called() + verify_sigops_output_wrapped2.assert_not_called() + + # validation should still be FULL, it must not be BASIC + self.assertEqual(tx.get_metadata().validation, ValidationState.FULL) + + def test_token_creation_transaction_verify_basic(self) -> None: + tx = self._get_valid_token_creation_tx() + + verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + + verify_parents_basic_wrapped = Mock(wraps=self.verifiers.tx.verify_parents_basic) + verify_weight_wrapped = Mock(wraps=self.verifiers.tx.verify_weight) + verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_number_of_inputs_wrapped = Mock(wraps=self.verifiers.tx.verify_number_of_inputs) + verify_output_token_indexes_wrapped = Mock(wraps=self.verifiers.tx.verify_output_token_indexes) + verify_number_of_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + + with ( + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(TransactionVerifier, 'verify_parents_basic', verify_parents_basic_wrapped), + patch.object(TransactionVerifier, 'verify_weight', verify_weight_wrapped), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), + patch.object(TransactionVerifier, 'verify_number_of_inputs', verify_number_of_inputs_wrapped), + patch.object(TransactionVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), + patch.object(VertexVerifier, 'verify_number_of_outputs', 
verify_number_of_outputs_wrapped), + patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), + ): + self.manager.verification_service.verify_basic(tx) + + # Vertex methods + verify_outputs_wrapped.assert_called_once() + + # Transaction methods + verify_parents_basic_wrapped.assert_called_once() + verify_weight_wrapped.assert_called_once() + verify_pow_wrapped.assert_called_once() + verify_number_of_inputs_wrapped.assert_called_once() + verify_output_token_indexes_wrapped.assert_called_once() + verify_number_of_outputs_wrapped.assert_called_once() + verify_sigops_output_wrapped.assert_called_once() + + def test_token_creation_transaction_verify_without_storage(self) -> None: + tx = self._get_valid_token_creation_tx() + + verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + + verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_number_of_inputs_wrapped = Mock(wraps=self.verifiers.tx.verify_number_of_inputs) + verify_output_token_indexes_wrapped = Mock(wraps=self.verifiers.tx.verify_output_token_indexes) + verify_number_of_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + + with ( + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), + patch.object(TransactionVerifier, 'verify_number_of_inputs', verify_number_of_inputs_wrapped), + patch.object(TransactionVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), + patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), + ): + self.manager.verification_service.verify_without_storage(tx) + + # Vertex methods + verify_outputs_wrapped.assert_called_once() + + # Transaction methods + 
verify_pow_wrapped.assert_called_once() + verify_number_of_inputs_wrapped.assert_called_once() + verify_output_token_indexes_wrapped.assert_called_once() + verify_number_of_outputs_wrapped.assert_called_once() + verify_sigops_output_wrapped.assert_called_once() + + def test_token_creation_transaction_verify(self) -> None: + tx = self._get_valid_token_creation_tx() + + verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + + verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_number_of_inputs_wrapped = Mock(wraps=self.verifiers.tx.verify_number_of_inputs) + verify_output_token_indexes_wrapped = Mock(wraps=self.verifiers.tx.verify_output_token_indexes) + verify_number_of_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + verify_sigops_input_wrapped = Mock(wraps=self.verifiers.tx.verify_sigops_input) + verify_inputs_wrapped = Mock(wraps=self.verifiers.tx.verify_inputs) + verify_script_wrapped = Mock(wraps=self.verifiers.tx.verify_script) + verify_parents_wrapped = Mock(wraps=self.verifiers.vertex.verify_parents) + verify_sum_wrapped = Mock(wraps=self.verifiers.tx.verify_sum) + verify_reward_locked_wrapped = Mock(wraps=self.verifiers.tx.verify_reward_locked) + + verify_token_info_wrapped = Mock(wraps=self.verifiers.token_creation_tx.verify_token_info) + verify_minted_tokens_wrapped = Mock(wraps=self.verifiers.token_creation_tx.verify_minted_tokens) + + with ( + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), + patch.object(TransactionVerifier, 'verify_number_of_inputs', verify_number_of_inputs_wrapped), + patch.object(TransactionVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), + 
patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), + patch.object(TransactionVerifier, 'verify_sigops_input', verify_sigops_input_wrapped), + patch.object(TransactionVerifier, 'verify_inputs', verify_inputs_wrapped), + patch.object(TransactionVerifier, 'verify_script', verify_script_wrapped), + patch.object(VertexVerifier, 'verify_parents', verify_parents_wrapped), + patch.object(TransactionVerifier, 'verify_sum', verify_sum_wrapped), + patch.object(TransactionVerifier, 'verify_reward_locked', verify_reward_locked_wrapped), + patch.object(TokenCreationTransactionVerifier, 'verify_token_info', verify_token_info_wrapped), + patch.object(TokenCreationTransactionVerifier, 'verify_minted_tokens', verify_minted_tokens_wrapped), + ): + self.manager.verification_service.verify(tx) + + # Vertex methods + verify_outputs_wrapped.assert_called_once() + + # Transaction methods + verify_pow_wrapped.assert_called_once() + verify_number_of_inputs_wrapped.assert_called_once() + verify_output_token_indexes_wrapped.assert_called_once() + verify_number_of_outputs_wrapped.assert_called_once() + verify_sigops_output_wrapped.assert_called_once() + verify_sigops_input_wrapped.assert_called_once() + verify_inputs_wrapped.assert_called_once() + verify_script_wrapped.assert_called_once() + verify_parents_wrapped.assert_called_once() + verify_sum_wrapped.assert_called_once() + verify_reward_locked_wrapped.assert_called_once() + + # TokenCreationTransaction methods + verify_token_info_wrapped.assert_called_once() + verify_minted_tokens_wrapped.assert_called_once() + + def test_token_creation_transaction_validate_basic(self) -> None: + tx = self._get_valid_token_creation_tx() + tx.get_metadata().validation = ValidationState.INITIAL + + verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + + verify_parents_basic_wrapped = Mock(wraps=self.verifiers.tx.verify_parents_basic) + verify_weight_wrapped = Mock(wraps=self.verifiers.tx.verify_weight) 
+ verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_number_of_inputs_wrapped = Mock(wraps=self.verifiers.tx.verify_number_of_inputs) + verify_output_token_indexes_wrapped = Mock(wraps=self.verifiers.tx.verify_output_token_indexes) + verify_number_of_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + + with ( + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(TransactionVerifier, 'verify_parents_basic', verify_parents_basic_wrapped), + patch.object(TransactionVerifier, 'verify_weight', verify_weight_wrapped), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), + patch.object(TransactionVerifier, 'verify_number_of_inputs', verify_number_of_inputs_wrapped), + patch.object(TransactionVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), + patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), + ): + self.manager.verification_service.validate_basic(tx) + + # Vertex methods + verify_outputs_wrapped.assert_called_once() + + # Transaction methods + verify_parents_basic_wrapped.assert_called_once() + verify_weight_wrapped.assert_called_once() + verify_pow_wrapped.assert_called_once() + verify_number_of_inputs_wrapped.assert_called_once() + verify_output_token_indexes_wrapped.assert_called_once() + verify_number_of_outputs_wrapped.assert_called_once() + verify_sigops_output_wrapped.assert_called_once() + + # validation should be BASIC + self.assertEqual(tx.get_metadata().validation, ValidationState.BASIC) + + # full validation should still pass and the validation updated to FULL + self.manager.verification_service.validate_full(tx) + self.assertEqual(tx.get_metadata().validation, ValidationState.FULL) + + # and if running basic validation 
again it shouldn't validate or change the validation state + verify_parents_basic_wrapped2 = Mock(wraps=self.verifiers.tx.verify_parents_basic) + verify_weight_wrapped2 = Mock(wraps=self.verifiers.tx.verify_weight) + verify_pow_wrapped2 = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_number_of_inputs_wrapped2 = Mock(wraps=self.verifiers.tx.verify_number_of_inputs) + verify_outputs_wrapped2 = Mock(wraps=self.verifiers.vertex.verify_outputs) + verify_number_of_outputs_wrapped2 = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_sigops_output_wrapped2 = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + + with ( + patch.object(TransactionVerifier, 'verify_parents_basic', verify_parents_basic_wrapped2), + patch.object(TransactionVerifier, 'verify_weight', verify_weight_wrapped2), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped2), + patch.object(TransactionVerifier, 'verify_number_of_inputs', verify_number_of_inputs_wrapped2), + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped2), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped2), + patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped2), + ): + self.manager.verification_service.validate_basic(tx) + + # Transaction methods + verify_parents_basic_wrapped2.assert_not_called() + verify_weight_wrapped2.assert_not_called() + verify_pow_wrapped2.assert_not_called() + verify_number_of_inputs_wrapped2.assert_not_called() + verify_outputs_wrapped2.assert_not_called() + verify_number_of_outputs_wrapped2.assert_not_called() + verify_sigops_output_wrapped2.assert_not_called() + + # validation should still be FULL, it must not be BASIC + self.assertEqual(tx.get_metadata().validation, ValidationState.FULL) + + def test_token_creation_transaction_validate_full(self) -> None: + tx = self._get_valid_token_creation_tx() + tx.get_metadata().validation = ValidationState.INITIAL + + 
verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + + verify_parents_basic_wrapped = Mock(wraps=self.verifiers.tx.verify_parents_basic) + verify_weight_wrapped = Mock(wraps=self.verifiers.tx.verify_weight) + verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) + verify_number_of_inputs_wrapped = Mock(wraps=self.verifiers.tx.verify_number_of_inputs) + verify_output_token_indexes_wrapped = Mock(wraps=self.verifiers.tx.verify_output_token_indexes) + verify_number_of_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_number_of_outputs) + verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) + verify_sigops_input_wrapped = Mock(wraps=self.verifiers.tx.verify_sigops_input) + verify_inputs_wrapped = Mock(wraps=self.verifiers.tx.verify_inputs) + verify_script_wrapped = Mock(wraps=self.verifiers.tx.verify_script) + verify_parents_wrapped = Mock(wraps=self.verifiers.vertex.verify_parents) + verify_sum_wrapped = Mock(wraps=self.verifiers.tx.verify_sum) + verify_reward_locked_wrapped = Mock(wraps=self.verifiers.tx.verify_reward_locked) + + verify_token_info_wrapped = Mock(wraps=self.verifiers.token_creation_tx.verify_token_info) + verify_minted_tokens_wrapped = Mock(wraps=self.verifiers.token_creation_tx.verify_minted_tokens) + + with ( + patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(TransactionVerifier, 'verify_parents_basic', verify_parents_basic_wrapped), + patch.object(TransactionVerifier, 'verify_weight', verify_weight_wrapped), + patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), + patch.object(TransactionVerifier, 'verify_number_of_inputs', verify_number_of_inputs_wrapped), + patch.object(TransactionVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), + patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), + patch.object(VertexVerifier, 'verify_sigops_output', 
verify_sigops_output_wrapped), + patch.object(TransactionVerifier, 'verify_sigops_input', verify_sigops_input_wrapped), + patch.object(TransactionVerifier, 'verify_inputs', verify_inputs_wrapped), + patch.object(TransactionVerifier, 'verify_script', verify_script_wrapped), + patch.object(VertexVerifier, 'verify_parents', verify_parents_wrapped), + patch.object(TransactionVerifier, 'verify_sum', verify_sum_wrapped), + patch.object(TransactionVerifier, 'verify_reward_locked', verify_reward_locked_wrapped), + patch.object(TokenCreationTransactionVerifier, 'verify_token_info', verify_token_info_wrapped), + patch.object(TokenCreationTransactionVerifier, 'verify_minted_tokens', verify_minted_tokens_wrapped), + ): + self.manager.verification_service.validate_full(tx) + + # Vertex methods + assert verify_outputs_wrapped.call_count == 2 + + # Transaction methods + verify_parents_basic_wrapped.assert_called_once() + verify_weight_wrapped.assert_called_once() + assert verify_pow_wrapped.call_count == 2 + assert verify_number_of_inputs_wrapped.call_count == 2 + assert verify_output_token_indexes_wrapped.call_count == 2 + assert verify_number_of_outputs_wrapped.call_count == 2 + assert verify_sigops_output_wrapped.call_count == 2 + verify_sigops_input_wrapped.assert_called_once() + verify_inputs_wrapped.assert_called_once() + verify_script_wrapped.assert_called_once() + verify_parents_wrapped.assert_called_once() + verify_sum_wrapped.assert_called_once() + verify_reward_locked_wrapped.assert_called_once() + + # TokenCreationTransaction methods + verify_token_info_wrapped.assert_called_once() + verify_minted_tokens_wrapped.assert_called_once() + + +class SyncV1VerificationTest(unittest.SyncV1Params, BaseVerificationTest): + __test__ = True + + +class SyncV2VerificationTest(unittest.SyncV2Params, BaseVerificationTest): + __test__ = True + + +# sync-bridge should behave like sync-v2 +class SyncBridgeVerificationTest(unittest.SyncBridgeParams, SyncV2VerificationTest): + pass diff 
--git a/tests/unittest.py b/tests/unittest.py index 837bec2e5..852f27bd8 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -12,12 +12,13 @@ from hathor.builder import BuildArtifacts, Builder from hathor.conf import HathorSettings from hathor.conf.get_settings import get_settings -from hathor.daa import TestMode, _set_test_mode +from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode from hathor.p2p.peer_id import PeerId from hathor.p2p.sync_version import SyncVersion +from hathor.reactor import ReactorProtocol as Reactor, get_global_reactor from hathor.simulator.clock import MemoryReactorHeapClock from hathor.transaction import BaseTransaction -from hathor.util import Random, Reactor, reactor +from hathor.util import Random from hathor.wallet import HDWallet, Wallet from tests.test_memory_reactor_clock import TestMemoryReactorClock @@ -104,14 +105,13 @@ class TestCase(unittest.TestCase): seed_config: Optional[int] = None def setUp(self): - _set_test_mode(TestMode.TEST_ALL_WEIGHT) self.tmpdirs = [] self.clock = TestMemoryReactorClock() self.clock.advance(time.time()) self.log = logger.new() self.reset_peer_id_pool() self.seed = secrets.randbits(64) if self.seed_config is None else self.seed_config - self.log.debug('set seed', seed=self.seed) + self.log.info('set seed', seed=self.seed) self.rng = Random(self.seed) self._pending_cleanups = [] self._settings = get_settings() @@ -176,7 +176,7 @@ def create_peer_from_builder(self, builder, start_manager=True): if start_manager: manager.start() self.clock.run() - self.run_to_completion() + self.clock.advance(5) return manager @@ -229,11 +229,9 @@ def create_peer(self, network, peer_id=None, wallet=None, tx_storage=None, unloc builder.force_memory_index() if enable_sync_v1 is True: - # Enable Sync v1.1 (instead of v1.0) - builder.enable_sync_v1_1() + builder.enable_sync_v1() elif enable_sync_v1 is False: - # Disable Sync v1.1 (instead of v1.0) - builder.disable_sync_v1_1() + builder.disable_sync_v1() if 
enable_sync_v2 is True: builder.enable_sync_v2() @@ -246,19 +244,13 @@ def create_peer(self, network, peer_id=None, wallet=None, tx_storage=None, unloc if utxo_index: builder.enable_utxo_index() + daa = DifficultyAdjustmentAlgorithm(settings=self._settings, test_mode=TestMode.TEST_ALL_WEIGHT) + builder.set_daa(daa) manager = self.create_peer_from_builder(builder, start_manager=start_manager) # XXX: just making sure that tests set this up correctly - if enable_sync_v2: - assert SyncVersion.V2 in manager.connections._sync_factories - else: - assert SyncVersion.V2 not in manager.connections._sync_factories - if enable_sync_v1: - assert SyncVersion.V1 not in manager.connections._sync_factories - assert SyncVersion.V1_1 in manager.connections._sync_factories - else: - assert SyncVersion.V1 not in manager.connections._sync_factories - assert SyncVersion.V1_1 not in manager.connections._sync_factories + assert manager.connections.is_sync_version_enabled(SyncVersion.V2) == enable_sync_v2 + assert manager.connections.is_sync_version_enabled(SyncVersion.V1_1) == enable_sync_v1 return manager @@ -342,7 +334,8 @@ def assertTipsEqualSyncV2(self, manager1, manager2, *, strict_sync_v2_indexes=Tr # best block (from height index) b1 = manager1.tx_storage.indexes.height.get_tip() b2 = manager2.tx_storage.indexes.height.get_tip() - self.assertEqual(b1, b2) + self.assertIn(b1, s2) + self.assertIn(b2, s1) def assertConsensusEqual(self, manager1, manager2): _, enable_sync_v2 = self._syncVersionFlags() @@ -471,7 +464,7 @@ def assertV1SyncedProgress(self, node_sync): self.assertEqual(node_sync.synced_timestamp, node_sync.peer_timestamp) def assertV2SyncedProgress(self, node_sync): - self.assertEqual(node_sync.synced_height, node_sync.peer_height) + self.assertEqual(node_sync.synced_block, node_sync.peer_best_block) def clean_tmpdirs(self): for tmpdir in self.tmpdirs: @@ -507,6 +500,7 @@ def clean_pending(self, required_to_quiesce=True): Copy from: 
https://github.com/zooko/pyutil/blob/master/pyutil/testutil.py#L68 """ + reactor = get_global_reactor() pending = reactor.getDelayedCalls() active = bool(pending) for p in pending: diff --git a/tests/utils.py b/tests/utils.py index 6a9403666..cdcbd7bb2 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,5 +1,4 @@ import base64 -import hashlib import os import string import subprocess @@ -18,11 +17,13 @@ from hathor.event.model.event_data import TxData, TxMetadata from hathor.event.model.event_type import EventType from hathor.manager import HathorManager +from hathor.mining.cpu_mining_service import CpuMiningService +from hathor.simulator.utils import add_new_block, add_new_blocks, gen_new_double_spending, gen_new_tx from hathor.transaction import BaseTransaction, Transaction, TxInput, TxOutput from hathor.transaction.scripts import P2PKH, HathorScript, Opcode, parse_address_script from hathor.transaction.token_creation_tx import TokenCreationTransaction from hathor.transaction.util import get_deposit_amount -from hathor.util import Random +from hathor.util import Random, not_none try: import rocksdb # noqa: F401 @@ -37,11 +38,7 @@ BURN_ADDRESS = bytes.fromhex('28acbfb94571417423c1ed66f706730c4aea516ac5762cccb8') -class NoCandidatesError(Exception): - pass - - -def resolve_block_bytes(block_bytes): +def resolve_block_bytes(*, block_bytes: bytes, cpu_mining_service: CpuMiningService) -> bytes: """ From block bytes we create a block and resolve pow Return block bytes with hash and nonce after pow :rtype: bytes @@ -49,7 +46,7 @@ def resolve_block_bytes(block_bytes): from hathor.transaction import Block block_bytes = base64.b64decode(block_bytes) block = Block.create_from_struct(block_bytes) - block.resolve() + cpu_mining_service.resolve(block) return block.get_struct() @@ -124,60 +121,12 @@ def gen_custom_tx(manager: HathorManager, tx_inputs: list[tuple[BaseTransaction, tx2.weight = weight or 25 tx2.timestamp += inc_timestamp if resolve: - tx2.resolve() + 
manager.cpu_mining_service.resolve(tx2) else: tx2.update_hash() return tx2 -def gen_new_double_spending(manager: HathorManager, *, use_same_parents: bool = False, - tx: Optional[Transaction] = None, weight: float = 1) -> Transaction: - if tx is None: - tx_candidates = manager.get_new_tx_parents() - genesis = manager.tx_storage.get_all_genesis() - genesis_txs = [tx for tx in genesis if not tx.is_block] - # XXX: it isn't possible to double-spend a genesis transaction, thus we remove it from tx_candidates - for genesis_tx in genesis_txs: - if genesis_tx.hash in tx_candidates: - tx_candidates.remove(genesis_tx.hash) - if not tx_candidates: - raise NoCandidatesError() - # assert tx_candidates, 'Must not be empty, otherwise test was wrongly set up' - tx_hash = manager.rng.choice(tx_candidates) - tx = cast(Transaction, manager.tx_storage.get_transaction(tx_hash)) - - txin = manager.rng.choice(tx.inputs) - - from hathor.transaction.scripts import P2PKH, parse_address_script - spent_tx = tx.get_spent_tx(txin) - spent_txout = spent_tx.outputs[txin.index] - p2pkh = parse_address_script(spent_txout.script) - assert isinstance(p2pkh, P2PKH) - - from hathor.wallet.base_wallet import WalletInputInfo, WalletOutputInfo - value = spent_txout.value - wallet = manager.wallet - assert wallet is not None - private_key = wallet.get_private_key(p2pkh.address) - inputs = [WalletInputInfo(tx_id=txin.tx_id, index=txin.index, private_key=private_key)] - - address = wallet.get_unused_address(mark_as_used=True) - outputs = [WalletOutputInfo(address=decode_address(address), value=int(value), timelock=None)] - - tx2 = wallet.prepare_transaction(Transaction, inputs, outputs) - tx2.storage = manager.tx_storage - tx2.weight = weight - tx2.timestamp = max(tx.timestamp + 1, int(manager.reactor.seconds())) - - if use_same_parents: - tx2.parents = list(tx.parents) - else: - tx2.parents = manager.get_new_tx_parents(tx2.timestamp) - - tx2.resolve() - return tx2 - - def add_new_double_spending(manager: 
HathorManager, *, use_same_parents: bool = False, tx: Optional[Transaction] = None, weight: float = 1) -> Transaction: tx = gen_new_double_spending(manager, use_same_parents=use_same_parents, tx=tx, weight=weight) @@ -185,27 +134,6 @@ def add_new_double_spending(manager: HathorManager, *, use_same_parents: bool = return tx -def gen_new_tx(manager, address, value, verify=True): - from hathor.transaction import Transaction - from hathor.wallet.base_wallet import WalletOutputInfo - - outputs = [] - outputs.append(WalletOutputInfo(address=decode_address(address), value=int(value), timelock=None)) - - tx = manager.wallet.prepare_transaction_compute_inputs(Transaction, outputs, manager.tx_storage) - tx.storage = manager.tx_storage - - max_ts_spent_tx = max(tx.get_spent_tx(txin).timestamp for txin in tx.inputs) - tx.timestamp = max(max_ts_spent_tx + 1, int(manager.reactor.seconds())) - - tx.weight = 1 - tx.parents = manager.get_new_tx_parents(tx.timestamp) - tx.resolve() - if verify: - manager.verification_service.verify(tx) - return tx - - def add_new_tx(manager, address, value, advance_clock=None, propagate=True): """ Create, resolve and propagate a new tx @@ -250,52 +178,6 @@ def add_new_transactions(manager, num_txs, advance_clock=None, propagate=True): return txs -def add_new_block(manager, advance_clock=None, *, parent_block_hash=None, - data=b'', weight=None, address=None, propagate=True): - """ Create, resolve and propagate a new block - - :param manager: Manager object to handle the creation - :type manager: :py:class:`hathor.manager.HathorManager` - - :return: Block created - :rtype: :py:class:`hathor.transaction.block.Block` - """ - block = manager.generate_mining_block(parent_block_hash=parent_block_hash, data=data, address=address) - if weight is not None: - block.weight = weight - block.resolve() - manager.verification_service.validate_full(block) - if propagate: - manager.propagate_tx(block, fails_silently=False) - if advance_clock: - 
manager.reactor.advance(advance_clock) - return block - - -def add_new_blocks(manager, num_blocks, advance_clock=None, *, parent_block_hash=None, - block_data=b'', weight=None, address=None): - """ Create, resolve and propagate some blocks - - :param manager: Manager object to handle the creation - :type manager: :py:class:`hathor.manager.HathorManager` - - :param num_blocks: Quantity of blocks to be created - :type num_blocks: int - - :return: Blocks created - :rtype: list[Block] - """ - blocks = [] - for _ in range(num_blocks): - blocks.append( - add_new_block(manager, advance_clock, parent_block_hash=parent_block_hash, - data=block_data, weight=weight, address=address) - ) - if parent_block_hash: - parent_block_hash = blocks[-1].hash - return blocks - - def add_blocks_unlock_reward(manager): """This method adds new blocks to a 'burn address' to make sure the existing block rewards can be spent. It uses a 'burn address' so the manager's wallet @@ -503,7 +385,7 @@ def create_tokens(manager: 'HathorManager', address_b58: Optional[str] = None, m deposit_input = [] while total_reward < deposit_amount: block = add_new_block(manager, advance_clock=1, address=address) - deposit_input.append(TxInput(block.hash, 0, b'')) + deposit_input.append(TxInput(not_none(block.hash), 0, b'')) total_reward += block.outputs[0].value if total_reward > deposit_amount: @@ -550,7 +432,7 @@ def create_tokens(manager: 'HathorManager', address_b58: Optional[str] = None, m for input_ in tx.inputs: input_.data = P2PKH.create_input_data(public_bytes, signature) - tx.resolve() + manager.cpu_mining_service.resolve(tx) if propagate: manager.verification_service.verify(tx) manager.propagate_tx(tx, fails_silently=False) @@ -593,7 +475,7 @@ def add_tx_with_data_script(manager: 'HathorManager', data: list[str], propagate burn_input = [] while total_reward < burn_amount: block = add_new_block(manager, advance_clock=1, address=address) - burn_input.append(TxInput(block.hash, 0, b'')) + 
burn_input.append(TxInput(not_none(block.hash), 0, b'')) total_reward += block.outputs[0].value # Create the change output, if needed @@ -638,7 +520,7 @@ def add_tx_with_data_script(manager: 'HathorManager', data: list[str], propagate for input_ in tx.inputs: input_.data = P2PKH.create_input_data(public_bytes, signature) - tx.resolve() + manager.cpu_mining_service.resolve(tx) if propagate: manager.verification_service.verify(tx) @@ -686,12 +568,8 @@ def gen_next_id(self) -> int: def generate_mocked_event(self, event_id: Optional[int] = None, group_id: Optional[int] = None) -> BaseEvent: """ Generates a mocked event with the best block found message """ - _hash = hashlib.sha256(self.generate_random_word(10).encode('utf-8')) - peer_id_mock = _hash.hexdigest() - return BaseEvent( id=event_id or self.gen_next_id(), - peer_id=peer_id_mock, timestamp=1658892990, type=EventType.VERTEX_METADATA_CHANGED, group_id=group_id, @@ -709,7 +587,6 @@ def create_event(cls, event_id: int) -> BaseEvent: """ Generates a mocked event with fixed properties, except the ID """ return BaseEvent( - peer_id='123', id=event_id, timestamp=123456, type=EventType.VERTEX_METADATA_CHANGED, diff --git a/tests/wallet/test_balance_update.py b/tests/wallet/test_balance_update.py index 15aad3e6b..eb44d91ff 100644 --- a/tests/wallet/test_balance_update.py +++ b/tests/wallet/test_balance_update.py @@ -1,11 +1,12 @@ from hathor.conf import HathorSettings from hathor.crypto.util import decode_address +from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TxInput, TxOutput from hathor.transaction.scripts import P2PKH from hathor.wallet.base_wallet import SpentTx, UnspentTx, WalletBalance, WalletInputInfo, WalletOutputInfo from hathor.wallet.exceptions import PrivateKeyNotFound from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_blocks, create_tokens +from tests.utils import add_blocks_unlock_reward, create_tokens settings = 
HathorSettings() @@ -38,7 +39,7 @@ def setUp(self): self.tx1.weight = 10 self.tx1.parents = self.manager.get_new_tx_parents() self.tx1.timestamp = int(self.clock.seconds()) - self.tx1.resolve() + self.manager.cpu_mining_service.resolve(self.tx1) self.manager.propagate_tx(self.tx1) self.run_to_completion() @@ -54,7 +55,7 @@ def test_balance_update1(self): tx2 = Transaction.create_from_struct(self.tx1.get_struct()) tx2.parents = [self.tx1.parents[1], self.tx1.parents[0]] tx2.weight = 9 - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) # Propagate a conflicting twin transaction self.manager.propagate_tx(tx2) @@ -100,7 +101,7 @@ def test_balance_update2(self): # Same weight, so both will be voided then the balance increases tx2 = Transaction.create_from_struct(self.tx1.get_struct()) tx2.parents = [self.tx1.parents[1], self.tx1.parents[0]] - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) # Propagate a conflicting twin transaction self.manager.propagate_tx(tx2) @@ -129,7 +130,7 @@ def test_balance_update3(self): tx2 = Transaction.create_from_struct(self.tx1.get_struct()) tx2.parents = [self.tx1.parents[1], self.tx1.parents[0]] tx2.weight = 13 - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) # Propagate a conflicting twin transaction self.manager.propagate_tx(tx2) @@ -165,7 +166,7 @@ def test_balance_update4(self): tx2.weight = 10 tx2.parents = [self.tx1.hash, self.tx1.parents[0]] tx2.timestamp = int(self.clock.seconds()) - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) self.manager.propagate_tx(tx2) self.run_to_completion() @@ -184,7 +185,7 @@ def test_balance_update4(self): # Change of parents only, so it's a twin. 
tx3 = Transaction.create_from_struct(tx2.get_struct()) tx3.parents = [tx2.parents[1], tx2.parents[0]] - tx3.resolve() + self.manager.cpu_mining_service.resolve(tx3) # Propagate a conflicting twin transaction self.manager.propagate_tx(tx3) @@ -221,12 +222,12 @@ def test_balance_update5(self): tx2.weight = 10 tx2.parents = [self.tx1.hash, self.tx1.parents[0]] tx2.timestamp = int(self.clock.seconds()) - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) # Change of parents only, so it's a twin. tx3 = Transaction.create_from_struct(self.tx1.get_struct()) tx3.parents = [self.tx1.parents[1], self.tx1.parents[0]] - tx3.resolve() + self.manager.cpu_mining_service.resolve(tx3) # Propagate a conflicting twin transaction self.manager.propagate_tx(tx2) @@ -258,7 +259,7 @@ def test_balance_update6(self): # Change of parents only, so it's a twin. tx2 = Transaction.create_from_struct(self.tx1.get_struct()) tx2.parents = [self.tx1.parents[1], self.tx1.parents[0]] - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) address = self.get_address(0) value = 100 @@ -271,7 +272,7 @@ def test_balance_update6(self): tx3.weight = 10 tx3.parents = [self.tx1.hash, self.tx1.parents[0]] tx3.timestamp = int(self.clock.seconds()) - tx3.resolve() + self.manager.cpu_mining_service.resolve(tx3) # Propagate a conflicting twin transaction self.manager.propagate_tx(tx2) @@ -301,13 +302,13 @@ def test_balance_update7(self): tx2.weight = 10 tx2.parents = [self.tx1.hash, self.tx1.parents[0]] tx2.timestamp = int(self.clock.seconds()) - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) # Change of parents only, so it's a twin. 
tx3 = Transaction.create_from_struct(self.tx1.get_struct()) tx3.parents = [self.tx1.parents[1], self.tx1.parents[0]] tx3.weight = 14 - tx3.resolve() + self.manager.cpu_mining_service.resolve(tx3) # Propagate a conflicting twin transaction self.manager.propagate_tx(tx2) @@ -341,7 +342,7 @@ def test_balance_update_twin_tx(self): tx2.weight = 10 tx2.parents = self.manager.get_new_tx_parents() tx2.timestamp = int(self.clock.seconds()) - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) self.manager.propagate_tx(tx2) self.run_to_completion() @@ -352,7 +353,7 @@ def test_balance_update_twin_tx(self): tx3.weight = 10 tx3.parents = self.manager.get_new_tx_parents() tx3.timestamp = int(self.clock.seconds()) - tx3.resolve() + self.manager.cpu_mining_service.resolve(tx3) self.manager.propagate_tx(tx3) self.run_to_completion() @@ -365,7 +366,7 @@ def test_balance_update_twin_tx(self): tx4.weight = 10 tx4.parents = [tx3.hash, tx3.parents[0]] tx4.timestamp = int(self.clock.seconds()) - tx4.resolve() + self.manager.cpu_mining_service.resolve(tx4) self.manager.propagate_tx(tx4) self.run_to_completion() @@ -373,7 +374,7 @@ def test_balance_update_twin_tx(self): tx5 = Transaction.create_from_struct(tx4.get_struct()) tx5.parents = [tx4.parents[1], tx4.parents[0]] tx5.weight = 10 - tx5.resolve() + self.manager.cpu_mining_service.resolve(tx5) # Propagate a conflicting twin transaction self.manager.propagate_tx(tx5) @@ -427,7 +428,7 @@ def test_tokens_balance(self): self.manager.wallet.get_private_key(address_b58) ) tx2.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) self.manager.verification_service.verify(tx2) self.manager.propagate_tx(tx2) self.run_to_completion() diff --git a/tests/wallet/test_index.py b/tests/wallet/test_index.py index f13b427f6..3a4fb7935 100644 --- a/tests/wallet/test_index.py +++ b/tests/wallet/test_index.py @@ -1,8 +1,9 @@ from hathor.crypto.util import decode_address 
+from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction from hathor.wallet.base_wallet import WalletOutputInfo from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_blocks +from tests.utils import add_blocks_unlock_reward class BaseWalletIndexTest(unittest.TestCase): @@ -31,12 +32,12 @@ def test_twin_tx(self): tx1.weight = 10 tx1.parents = self.manager.get_new_tx_parents() tx1.timestamp = int(self.clock.seconds()) - tx1.resolve() + self.manager.cpu_mining_service.resolve(tx1) # Change of parents only, so it's a twin tx2 = Transaction.create_from_struct(tx1.get_struct()) tx2.parents = [tx1.parents[1], tx1.parents[0]] - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) self.assertNotEqual(tx1.hash, tx2.hash) self.manager.propagate_tx(tx1) diff --git a/tests/wallet/test_wallet.py b/tests/wallet/test_wallet.py index 5add4f3ad..bade3f519 100644 --- a/tests/wallet/test_wallet.py +++ b/tests/wallet/test_wallet.py @@ -6,13 +6,14 @@ from hathor.conf import HathorSettings from hathor.crypto.util import decode_address, get_address_b58_from_public_key, get_private_key_bytes +from hathor.simulator.utils import add_new_block from hathor.transaction import Transaction, TxInput from hathor.wallet import Wallet from hathor.wallet.base_wallet import WalletBalance, WalletInputInfo, WalletOutputInfo from hathor.wallet.exceptions import InsufficientFunds, InvalidAddress, OutOfUnusedAddresses, WalletLocked from hathor.wallet.keypair import KeyPair from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_block, create_tokens, get_genesis_key +from tests.utils import add_blocks_unlock_reward, create_tokens, get_genesis_key settings = HathorSettings() @@ -207,7 +208,7 @@ def test_create_token_transaction(self): tx2.storage = self.manager.tx_storage tx2.timestamp = tx.timestamp + 1 tx2.parents = self.manager.get_new_tx_parents() - tx2.resolve() + 
self.manager.cpu_mining_service.resolve(tx2) self.manager.verification_service.verify(tx2) self.assertNotEqual(len(tx2.inputs), 0) @@ -265,7 +266,7 @@ def test_maybe_spent_txs(self): tx2.parents = self.manager.get_new_tx_parents(tx2.timestamp) tx2.weight = 1 tx2.timestamp = blocks[-1].timestamp + 1 - tx2.resolve() + self.manager.cpu_mining_service.resolve(tx2) self.assertTrue(self.manager.on_new_tx(tx2, fails_silently=False)) self.clock.advance(2) self.assertEqual(0, len(w.maybe_spent_txs[settings.HATHOR_TOKEN_UID])) diff --git a/tests/wallet/test_wallet_hd.py b/tests/wallet/test_wallet_hd.py index 5c18648cb..ea2faa615 100644 --- a/tests/wallet/test_wallet_hd.py +++ b/tests/wallet/test_wallet_hd.py @@ -1,11 +1,12 @@ from hathor.conf import HathorSettings from hathor.crypto.util import decode_address +from hathor.simulator.utils import add_new_block from hathor.transaction import Transaction from hathor.wallet import HDWallet from hathor.wallet.base_wallet import WalletBalance, WalletInputInfo, WalletOutputInfo from hathor.wallet.exceptions import InsufficientFunds from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_new_block +from tests.utils import add_blocks_unlock_reward settings = HathorSettings() @@ -42,7 +43,8 @@ def test_transaction_and_balance(self): out = WalletOutputInfo(decode_address(new_address2), self.TOKENS, timelock=None) tx1 = self.wallet.prepare_transaction_compute_inputs(Transaction, [out], self.tx_storage) tx1.update_hash() - tx1.verify_script(tx1.inputs[0], block) + verifier = self.manager.verification_service.verifiers.tx + verifier.verify_script(tx=tx1, input_tx=tx1.inputs[0], spent_tx=block) tx1.storage = self.tx_storage tx1.get_metadata().validation = ValidationState.FULL self.wallet.on_new_tx(tx1) @@ -62,7 +64,7 @@ def test_transaction_and_balance(self): tx2.storage = self.tx_storage tx2.update_hash() tx2.storage = self.tx_storage - tx2.verify_script(tx2.inputs[0], tx1) + verifier.verify_script(tx=tx2, 
input_tx=tx2.inputs[0], spent_tx=tx1) tx2.get_metadata().validation = ValidationState.FULL self.tx_storage.save_transaction(tx2) self.wallet.on_new_tx(tx2)