diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index 4499b69a6..1c83b3494 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -24,6 +24,8 @@ from hathor.event import EventManager from hathor.event.storage import EventMemoryStorage, EventRocksDBStorage, EventStorage from hathor.event.websocket import EventWebsocketFactory +from hathor.feature_activation.bit_signaling_service import BitSignalingService +from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService from hathor.indexes import IndexesManager, MemoryIndexesManager, RocksDBIndexesManager from hathor.manager import HathorManager @@ -64,7 +66,6 @@ class BuildArtifacts(NamedTuple): wallet: Optional[BaseWallet] rocksdb_storage: Optional[RocksDBStorage] stratum_factory: Optional[StratumFactory] - feature_service: FeatureService class Builder: @@ -80,7 +81,7 @@ def __init__(self) -> None: self.log = logger.new() self.artifacts: Optional[BuildArtifacts] = None - self._settings: HathorSettingsType = HathorSettings() + self._settings: Optional[HathorSettingsType] = None self._rng: Random = Random() self._checkpoints: Optional[list[Checkpoint]] = None self._capabilities: Optional[list[str]] = None @@ -95,6 +96,11 @@ def __init__(self) -> None: self._event_manager: Optional[EventManager] = None self._enable_event_queue: Optional[bool] = None + self._support_features: set[Feature] = set() + self._not_support_features: set[Feature] = set() + self._feature_service: Optional[FeatureService] = None + self._bit_signaling_service: Optional[BitSignalingService] = None + self._rocksdb_path: Optional[str] = None self._rocksdb_storage: Optional[RocksDBStorage] = None self._rocksdb_cache_capacity: Optional[int] = None @@ -134,7 +140,7 @@ def build(self) -> BuildArtifacts: if self._network is None: raise TypeError('you must set a network') - settings = self._get_settings() + settings = self._get_or_create_settings() reactor = 
self._get_reactor() pubsub = self._get_or_create_pubsub() @@ -149,6 +155,8 @@ def build(self) -> BuildArtifacts: event_manager = self._get_or_create_event_manager() indexes = self._get_or_create_indexes_manager() tx_storage = self._get_or_create_tx_storage(indexes) + feature_service = self._get_or_create_feature_service(tx_storage) + bit_signaling_service = self._get_or_create_bit_signaling_service(tx_storage) if self._enable_address_index: indexes.enable_address_index(pubsub) @@ -181,6 +189,8 @@ def build(self) -> BuildArtifacts: checkpoints=self._checkpoints, capabilities=self._capabilities, environment_info=get_environment_info(self._cmdline, peer_id.id), + feature_service=feature_service, + bit_signaling_service=bit_signaling_service, **kwargs ) @@ -190,8 +200,6 @@ def build(self) -> BuildArtifacts: if self._enable_stratum_server: stratum_factory = self._create_stratum_server(manager) - feature_service = self._create_feature_service(tx_storage) - self.artifacts = BuildArtifacts( peer_id=peer_id, settings=settings, @@ -206,7 +214,6 @@ def build(self) -> BuildArtifacts: wallet=wallet, rocksdb_storage=self._rocksdb_storage, stratum_factory=stratum_factory, - feature_service=feature_service ) return self.artifacts @@ -220,6 +227,16 @@ def set_event_manager(self, event_manager: EventManager) -> 'Builder': self._event_manager = event_manager return self + def set_feature_service(self, feature_service: FeatureService) -> 'Builder': + self.check_if_can_modify() + self._feature_service = feature_service + return self + + def set_bit_signaling_service(self, bit_signaling_service: BitSignalingService) -> 'Builder': + self.check_if_can_modify() + self._bit_signaling_service = bit_signaling_service + return self + def set_rng(self, rng: Random) -> 'Builder': self.check_if_can_modify() self._rng = rng @@ -240,7 +257,9 @@ def set_peer_id(self, peer_id: PeerId) -> 'Builder': self._peer_id = peer_id return self - def _get_settings(self) -> HathorSettingsType: + def 
_get_or_create_settings(self) -> HathorSettingsType: + if self._settings is None: + self._settings = HathorSettings() return self._settings def _get_reactor(self) -> Reactor: @@ -252,7 +271,7 @@ def _get_soft_voided_tx_ids(self) -> set[bytes]: if self._soft_voided_tx_ids is not None: return self._soft_voided_tx_ids - settings = self._get_settings() + settings = self._get_or_create_settings() return set(settings.SOFT_VOIDED_TX_IDS) @@ -272,12 +291,6 @@ def _create_stratum_server(self, manager: HathorManager) -> StratumFactory: manager.metrics.stratum_factory = stratum_factory return stratum_factory - def _create_feature_service(self, tx_storage: TransactionStorage) -> FeatureService: - return FeatureService( - feature_settings=self._settings.FEATURE_ACTIVATION, - tx_storage=tx_storage - ) - def _get_or_create_rocksdb_storage(self) -> RocksDBStorage: assert self._rocksdb_path is not None @@ -388,6 +401,29 @@ def _get_or_create_event_manager(self) -> EventManager: return self._event_manager + def _get_or_create_feature_service(self, tx_storage: TransactionStorage) -> FeatureService: + if self._feature_service is None: + settings = self._get_or_create_settings() + self._feature_service = FeatureService( + feature_settings=settings.FEATURE_ACTIVATION, + tx_storage=tx_storage + ) + + return self._feature_service + + def _get_or_create_bit_signaling_service(self, tx_storage: TransactionStorage) -> BitSignalingService: + if self._bit_signaling_service is None: + settings = self._get_or_create_settings() + self._bit_signaling_service = BitSignalingService( + feature_settings=settings.FEATURE_ACTIVATION, + feature_service=self._get_or_create_feature_service(tx_storage), + tx_storage=tx_storage, + support_features=self._support_features, + not_support_features=self._not_support_features, + ) + + return self._bit_signaling_service + def use_memory(self) -> 'Builder': self.check_if_can_modify() self._storage_type = StorageType.MEMORY @@ -559,3 +595,19 @@ def 
set_soft_voided_tx_ids(self, soft_voided_tx_ids: set[bytes]) -> 'Builder': self.check_if_can_modify() self._soft_voided_tx_ids = soft_voided_tx_ids return self + + def set_features( + self, + *, + support_features: Optional[set[Feature]], + not_support_features: Optional[set[Feature]] + ) -> 'Builder': + self.check_if_can_modify() + self._support_features = support_features or set() + self._not_support_features = not_support_features or set() + return self + + def set_settings(self, settings: HathorSettingsType) -> 'Builder': + self.check_if_can_modify() + self._settings = settings + return self diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index b057145f8..d8760d46e 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -20,12 +20,13 @@ from typing import Any, Optional from structlog import get_logger -from twisted.internet.posixbase import PosixReactorBase from hathor.cli.run_node import RunNodeArgs from hathor.consensus import ConsensusAlgorithm from hathor.event import EventManager from hathor.exception import BuilderError +from hathor.feature_activation.bit_signaling_service import BitSignalingService +from hathor.feature_activation.feature_service import FeatureService from hathor.indexes import IndexesManager, MemoryIndexesManager, RocksDBIndexesManager from hathor.manager import HathorManager from hathor.p2p.manager import ConnectionsManager @@ -33,7 +34,7 @@ from hathor.p2p.utils import discover_hostname from hathor.pubsub import PubSubManager from hathor.stratum import StratumFactory -from hathor.util import Random +from hathor.util import Random, Reactor from hathor.wallet import BaseWallet, HDWallet, Wallet logger = get_logger() @@ -53,7 +54,7 @@ def check_or_raise(self, condition: bool, message: str) -> None: if not condition: raise BuilderError(message) - def create_manager(self, reactor: PosixReactorBase) -> HathorManager: + def create_manager(self, reactor: Reactor) -> HathorManager: import 
hathor from hathor.conf import HathorSettings from hathor.conf.get_settings import get_settings_source @@ -189,6 +190,19 @@ def create_manager(self, reactor: PosixReactorBase) -> HathorManager: self.log.info('--x-enable-event-queue flag provided. ' 'The events detected by the full node will be stored and can be retrieved by clients') + self.feature_service = FeatureService( + feature_settings=settings.FEATURE_ACTIVATION, + tx_storage=tx_storage + ) + + bit_signaling_service = BitSignalingService( + feature_settings=settings.FEATURE_ACTIVATION, + feature_service=self.feature_service, + tx_storage=tx_storage, + support_features=self._args.signal_support, + not_support_features=self._args.signal_not_support + ) + p2p_manager = ConnectionsManager( reactor, network=network, @@ -216,7 +230,9 @@ def create_manager(self, reactor: PosixReactorBase) -> HathorManager: checkpoints=settings.CHECKPOINTS, environment_info=get_environment_info(args=str(self._args), peer_id=peer_id.id), full_verification=full_verification, - enable_event_queue=self._args.x_enable_event_queue + enable_event_queue=self._args.x_enable_event_queue, + feature_service=self.feature_service, + bit_signaling_service=bit_signaling_service ) p2p_manager.set_manager(self.manager) diff --git a/hathor/cli/nginx_config.py b/hathor/cli/nginx_config.py index f8d70fc39..d441a4842 100644 --- a/hathor/cli/nginx_config.py +++ b/hathor/cli/nginx_config.py @@ -199,11 +199,15 @@ def generate_nginx_config(openapi: dict[str, Any], *, out_file: TextIO, rate_k: websocket_max_conn_per_ip = 10000 mining_websocket_max_conn_global = 1000 mining_websocket_max_conn_per_ip = 1000 + event_websocket_max_conn_global = 1000 + event_websocket_max_conn_per_ip = 1000 else: websocket_max_conn_global = 4000 websocket_max_conn_per_ip = 10 mining_websocket_max_conn_global = 100 mining_websocket_max_conn_per_ip = 4 + event_websocket_max_conn_global = 100 + event_websocket_max_conn_per_ip = 4 header = f'''# THIS FILE WAS AUTOGENERATED BY THE 
`hathor-cli nginx-config` TOOL AT {datetime.now()} @@ -231,6 +235,8 @@ def generate_nginx_config(openapi: dict[str, Any], *, out_file: TextIO, rate_k: limit_conn_zone $per_ip_key zone=per_ip__ws:10m; limit_conn_zone $global_key zone=global__mining_ws:32k; limit_conn_zone $per_ip_key zone=per_ip__mining_ws:10m; +limit_conn_zone $global_key zone=global__event_ws:32k; +limit_conn_zone $per_ip_key zone=per_ip__event_ws:10m; ''' server_open = f''' @@ -282,6 +288,16 @@ def generate_nginx_config(openapi: dict[str, Any], *, out_file: TextIO, rate_k: proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection "upgrade"; proxy_pass http://backend; + }} + location ~ ^/{api_prefix}/event_ws/?$ {{ + limit_conn global__event_ws {event_websocket_max_conn_global}; + limit_conn per_ip__event_ws {event_websocket_max_conn_per_ip}; + include cors_params; + include proxy_params; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_pass http://backend; }}''' # TODO: maybe return 403 instead? 
server_close = f''' diff --git a/hathor/cli/openapi_files/openapi_base.json b/hathor/cli/openapi_files/openapi_base.json index 4d55fe9ae..92d7dd989 100644 --- a/hathor/cli/openapi_files/openapi_base.json +++ b/hathor/cli/openapi_files/openapi_base.json @@ -7,7 +7,7 @@ ], "info": { "title": "Hathor API", - "version": "0.55.0" + "version": "0.56.0" }, "consumes": [ "application/json" diff --git a/hathor/cli/openapi_files/register.py b/hathor/cli/openapi_files/register.py index 3e531f97d..733f56848 100644 --- a/hathor/cli/openapi_files/register.py +++ b/hathor/cli/openapi_files/register.py @@ -34,6 +34,8 @@ def register_resource(resource_class: ResourceClass) -> ResourceClass: def get_registered_resources() -> list[type[Resource]]: """ Returns a list with all the resources registered for the docs """ + import hathor.event.resources.event # noqa: 401 + import hathor.feature_activation.resources.feature # noqa: 401 import hathor.p2p.resources # noqa: 401 import hathor.profiler.resources # noqa: 401 import hathor.stratum.resources # noqa: 401 diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 00cbebe0e..f3198fc26 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -156,21 +156,22 @@ def prepare(self, *, register_resources: bool = True) -> None: self.start_manager() if self._args.stratum: + assert self.manager.stratum_factory is not None self.reactor.listenTCP(self._args.stratum, self.manager.stratum_factory) from hathor.conf import HathorSettings - from hathor.feature_activation.feature_service import FeatureService settings = HathorSettings() - feature_service = FeatureService( - feature_settings=settings.FEATURE_ACTIVATION, - tx_storage=self.manager.tx_storage - ) - if register_resources: - resources_builder = ResourcesBuilder(self.manager, self._args, builder.event_ws_factory, feature_service) + resources_builder = ResourcesBuilder( + self.manager, + self._args, + builder.event_ws_factory, + builder.feature_service + ) status_server = 
resources_builder.build() if self._args.status: + assert status_server is not None self.reactor.listenTCP(self._args.status, status_server) from hathor.builder.builder import BuildArtifacts @@ -188,7 +189,6 @@ def prepare(self, *, register_resources: bool = True) -> None: wallet=self.manager.wallet, rocksdb_storage=getattr(builder, 'rocksdb_storage', None), stratum_factory=self.manager.stratum_factory, - feature_service=feature_service ) def start_sentry_if_possible(self) -> None: @@ -312,11 +312,14 @@ def check_unsafe_arguments(self) -> None: sys.exit(-1) def check_python_version(self) -> None: - MIN_VER = (3, 8) - RECOMMENDED_VER = (3, 9) + # comments to help grep's + MIN_VER = (3, 9) # Python-3.9 + MIN_STABLE = (3, 10) # Python-3.10 + RECOMMENDED_VER = (3, 10) # Python-3.10 cur = sys.version_info - min_pretty = '.'.join(map(str, MIN_VER)) cur_pretty = '.'.join(map(str, cur)) + min_pretty = '.'.join(map(str, MIN_VER)) + min_stable_pretty = '.'.join(map(str, MIN_STABLE)) recommended_pretty = '.'.join(map(str, RECOMMENDED_VER)) if cur < MIN_VER: self.log.critical('\n'.join([ @@ -329,6 +332,17 @@ def check_python_version(self) -> None: '', ])) sys.exit(-1) + elif cur < MIN_STABLE: + self.log.warning('\n'.join([ + '', + '********************************************************', + f'The detected Python version {cur_pretty} is deprecated and support for it will be removed in the' + ' next release.', + f'The minimum supported Python version will be {min_stable_pretty}', + f'The recommended Python version is {recommended_pretty}', + '********************************************************', + '', + ])) def __init__(self, *, argv=None): self.log = logger.new() @@ -375,11 +389,13 @@ def init_sysctl(self, description: str) -> None: from hathor.builder.sysctl_builder import SysctlBuilder from hathor.sysctl.factory import SysctlFactory + from hathor.sysctl.runner import SysctlRunner builder = SysctlBuilder(self.artifacts) root = builder.build() - factory = 
SysctlFactory(root) + runner = SysctlRunner(root) + factory = SysctlFactory(runner) endpoint = serverFromString(self.reactor, description) endpoint.listen(factory) diff --git a/hathor/conf/testnet.py b/hathor/conf/testnet.py index 1dad09eef..b79ac6fe4 100644 --- a/hathor/conf/testnet.py +++ b/hathor/conf/testnet.py @@ -14,6 +14,9 @@ from hathor.checkpoint import Checkpoint as cp from hathor.conf.settings import HathorSettings +from hathor.feature_activation.feature import Feature +from hathor.feature_activation.model.criteria import Criteria +from hathor.feature_activation.settings import Settings as FeatureActivationSettings SETTINGS = HathorSettings( P2PKH_VERSION_BYTE=b'\x49', @@ -51,4 +54,37 @@ cp(1_500_000, bytes.fromhex('000000000c3591805f4748480b59ac1788f754fc004930985a487580e2b5de8f')), cp(1_600_000, bytes.fromhex('00000000060adfdfd7d488d4d510b5779cf35a3c50df7bcff941fbb6957be4d2')), ], + FEATURE_ACTIVATION=FeatureActivationSettings( + enable_usage=True, + default_threshold=30240, + features={ + Feature.NOP_FEATURE_1: Criteria( + bit=0, + start_height=3_144_960, # N (right now the best block is 3093551 on testnet) + timeout_height=3_225_600, # N + 2 * 40320 (4 weeks after the start) + minimum_activation_height=3_265_920, # N + 3 * 40320 (6 weeks after the start) + lock_in_on_timeout=False, + version='0.56.0', + signal_support_by_default=True + ), + Feature.NOP_FEATURE_2: Criteria( + bit=1, + start_height=3_144_960, # N (right now the best block is 3093551 on testnet) + timeout_height=3_225_600, # N + 2 * 40320 (4 weeks after the start) + minimum_activation_height=0, + lock_in_on_timeout=True, + version='0.56.0', + signal_support_by_default=False + ), + Feature.NOP_FEATURE_3: Criteria( + bit=2, + start_height=3_144_960, # N (right now the best block is 3093551 on testnet) + timeout_height=3_225_600, # N + 2 * 40320 (4 weeks after the start) + minimum_activation_height=0, + lock_in_on_timeout=False, + version='0.56.0', + signal_support_by_default=False + ) + } 
+ ) ) diff --git a/hathor/conf/testnet.yml b/hathor/conf/testnet.yml index 98ad03430..81eaf1c58 100644 --- a/hathor/conf/testnet.yml +++ b/hathor/conf/testnet.yml @@ -36,6 +36,33 @@ CHECKPOINTS: 1_500_000: 000000000c3591805f4748480b59ac1788f754fc004930985a487580e2b5de8f 1_600_000: 00000000060adfdfd7d488d4d510b5779cf35a3c50df7bcff941fbb6957be4d2 -# TODO: Enable this config when settings via python modules are no longer used -# FEATURE_ACTIVATION: -# default_threshold: 30240 # 30240 = 75% of evaluation_interval (40320) +FEATURE_ACTIVATION: + enable_usage: true + default_threshold: 30_240 # 30240 = 75% of evaluation_interval (40320) + features: + NOP_FEATURE_1: + bit: 0 + start_height: 3_144_960 # N (right now the best block is 3093551 on testnet) + timeout_height: 3_225_600 # N + 2 * 40320 (4 weeks after the start) + minimum_activation_height: 3_265_920 # N + 3 * 40320 (6 weeks after the start) + lock_in_on_timeout: false + version: 0.56.0 + signal_support_by_default: true + + NOP_FEATURE_2: + bit: 1 + start_height: 3_144_960 # N (right now the best block is 3093551 on testnet) + timeout_height: 3_225_600 # N + 2 * 40320 (4 weeks after the start) + minimum_activation_height: 0 + lock_in_on_timeout: true + version: 0.56.0 + signal_support_by_default: false + + NOP_FEATURE_3: + bit: 2 + start_height: 3_144_960 # N (right now the best block is 3093551 on testnet) + timeout_height: 3_225_600 # N + 2 * 40320 (4 weeks after the start) + minimum_activation_height: 0 + lock_in_on_timeout: false + version: 0.56.0 + signal_support_by_default: false diff --git a/hathor/consensus/consensus.py b/hathor/consensus/consensus.py index 185288751..acf3c5d6f 100644 --- a/hathor/consensus/consensus.py +++ b/hathor/consensus/consensus.py @@ -21,6 +21,7 @@ from hathor.profiler import get_cpu_profiler from hathor.pubsub import HathorEvents, PubSubManager from hathor.transaction import BaseTransaction +from hathor.util import not_none logger = get_logger() settings = HathorSettings() @@ 
-110,7 +111,7 @@ def _unsafe_update(self, base: BaseTransaction) -> None: prev_block_tip=best_tip.hex(), new_block_tip=new_best_tip.hex()) # XXX: this method will mark as INVALID all transactions in the mempool that became invalid because of a # reward lock - to_remove = storage.compute_transactions_that_became_invalid() + to_remove = storage.compute_transactions_that_became_invalid(new_best_height) if to_remove: self.log.warn('some transactions on the mempool became invalid and will be removed', count=len(to_remove)) @@ -135,7 +136,8 @@ def _unsafe_update(self, base: BaseTransaction) -> None: reorg_size=reorg_size) # finally signal an index update for all affected transactions - for tx_affected in context.txs_affected: + sorted_txs_affected = sorted(context.txs_affected, key=lambda tx: not_none(tx.hash)) + for tx_affected in sorted_txs_affected: assert tx_affected.storage is not None assert tx_affected.storage.indexes is not None tx_affected.storage.indexes.update(tx_affected) diff --git a/hathor/debug_resources.py b/hathor/debug_resources.py index 1641dec16..6e050a63c 100644 --- a/hathor/debug_resources.py +++ b/hathor/debug_resources.py @@ -17,6 +17,7 @@ from structlog import get_logger from twisted.internet import defer +from twisted.internet.interfaces import IReactorFromThreads from twisted.web.http import Request from hathor.api_util import Resource, get_arg_default, get_args @@ -24,6 +25,7 @@ from hathor.exception import HathorError from hathor.manager import HathorManager from hathor.util import reactor +from hathor.utils.zope import asserted_cast logger = get_logger() @@ -61,7 +63,8 @@ def render_GET(self, request: Request) -> bytes: assert exc_cls_name in self.exc_class_map exc_cls = self.exc_class_map[exc_cls_name] msg = get_arg_default(raw_args, 'msg', self.default_msg) - reactor.callFromThread(self.run, exc_cls, msg) + threaded_reactor = asserted_cast(IReactorFromThreads, reactor) + threaded_reactor.callFromThread(self.run, exc_cls, msg) return b'OK: 
no side-effects\n' @@ -185,7 +188,8 @@ def render_GET(self, request: Request) -> bytes: mess = get_arg_default(get_args(request), 'with', self.default_mess) assert mess in self.mess_map mess_func = self.mess_map[mess] - reactor.callFromThread(mess_func) + threaded_reactor = asserted_cast(IReactorFromThreads, reactor) + threaded_reactor.callFromThread(mess_func) return b'OK: database yanked, full-node will break\n' diff --git a/hathor/event/event_manager.py b/hathor/event/event_manager.py index d93c0b8af..d8c0a10a4 100644 --- a/hathor/event/event_manager.py +++ b/hathor/event/event_manager.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Callable, Iterable, Iterator, Optional +from typing import Callable, Iterator, Optional from structlog import get_logger @@ -24,9 +24,12 @@ from hathor.pubsub import EventArguments, HathorEvents, PubSubManager from hathor.transaction import BaseTransaction from hathor.util import Reactor, progress +from hathor.utils.iter import batch_iterator logger = get_logger() +N_LOAD_EVENTS_PER_BATCH = 10_000 + _GROUP_START_EVENTS = { EventType.REORG_STARTED, } @@ -258,20 +261,30 @@ def handle_load_phase_vertices( if not self._should_reload_events(): return - def create_event_batch() -> Iterable[BaseEvent]: - assert self._event_ws_factory is not None - self.log.info('Starting creating events from existing database...') + self.log.info('Started creating events from existing database...') + event_iterator = self._create_event_iterator(topological_iterator, total_vertices) + event_batches = batch_iterator(event_iterator, N_LOAD_EVENTS_PER_BATCH) + + for batch in event_batches: + self._event_storage.save_events(batch) - for vertex in progress(topological_iterator, log=self.log, total=total_vertices): - event = self._handle_event_creation( - event_type=EventType.NEW_VERTEX_ACCEPTED, - event_args=EventArguments(tx=vertex) - ) + self.log.info('Finished 
creating events from existing database.') - yield event - self._event_ws_factory.broadcast_event(event) - self._last_event = event + def _create_event_iterator( + self, + topological_iterator: Iterator[BaseTransaction], + total_vertices: int + ) -> Iterator[BaseEvent]: + """Given a topological iterator of txs, create an iterator of events while also tracking progress and + broadcasting them.""" + assert self._event_ws_factory is not None - self.log.info('Finished creating events from existing database.') + for vertex in progress(topological_iterator, log=self.log, total=total_vertices): + event = self._handle_event_creation( + event_type=EventType.NEW_VERTEX_ACCEPTED, + event_args=EventArguments(tx=vertex) + ) - self._event_storage.save_events(create_event_batch()) + yield event + self._event_ws_factory.broadcast_event(event) + self._last_event = event diff --git a/hathor/event/resources/event.py b/hathor/event/resources/event.py index b3b7dac38..b3fd588ab 100644 --- a/hathor/event/resources/event.py +++ b/hathor/event/resources/event.py @@ -76,7 +76,23 @@ class GetEventsResponse(Response): EventResource.openapi = { '/event': { - 'x-visibility': 'private', + 'x-visibility': 'public', + 'x-rate-limit': { + 'global': [ + { + 'rate': '50r/s', + 'burst': 100, + 'delay': 50 + } + ], + 'per-ip': [ + { + 'rate': '1r/s', + 'burst': 10, + 'delay': 3 + } + ] + }, 'get': { 'operationId': 'event', 'summary': 'Hathor Events', diff --git a/hathor/feature_activation/resources/feature.py b/hathor/feature_activation/resources/feature.py index 58c8070b4..f24579ddc 100644 --- a/hathor/feature_activation/resources/feature.py +++ b/hathor/feature_activation/resources/feature.py @@ -160,7 +160,23 @@ class GetFeaturesResponse(Response): FeatureResource.openapi = { '/feature': { - 'x-visibility': 'private', + 'x-visibility': 'public', + 'x-rate-limit': { + 'global': [ + { + 'rate': '50r/s', + 'burst': 100, + 'delay': 50 + } + ], + 'per-ip': [ + { + 'rate': '1r/s', + 'burst': 10, + 
'delay': 3 + } + ] + }, 'get': { 'operationId': 'feature', 'summary': 'Feature Activation', diff --git a/hathor/feature_activation/settings.py b/hathor/feature_activation/settings.py index f9505db12..da4849ef6 100644 --- a/hathor/feature_activation/settings.py +++ b/hathor/feature_activation/settings.py @@ -41,6 +41,9 @@ class Settings(BaseModel, validate_all=True): # neither their values changed, to preserve history. features: dict[Feature, Criteria] = {} + # Boolean indicating whether feature activation can be used. + enable_usage: bool = False + @validator('default_threshold') def _validate_default_threshold(cls, default_threshold: int, values: dict[str, Any]) -> int: """Validates that the default_threshold is not greater than the evaluation_interval.""" diff --git a/hathor/manager.py b/hathor/manager.py index 2d0b14577..4f6284217 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -39,6 +39,9 @@ RewardLockedError, SpendingVoidedError, ) +from hathor.feature_activation.bit_signaling_service import BitSignalingService +from hathor.feature_activation.feature import Feature +from hathor.feature_activation.feature_service import FeatureService from hathor.mining import BlockTemplate, BlockTemplates from hathor.p2p.manager import ConnectionsManager from hathor.p2p.peer_discovery import PeerDiscovery @@ -97,6 +100,8 @@ def __init__(self, tx_storage: TransactionStorage, p2p_manager: ConnectionsManager, event_manager: EventManager, + feature_service: FeatureService, + bit_signaling_service: BitSignalingService, network: str, hostname: Optional[str] = None, wallet: Optional[BaseWallet] = None, @@ -170,6 +175,9 @@ def __init__(self, self._event_manager.save_event_queue_state(enable_event_queue) self._enable_event_queue = enable_event_queue + self._feature_service = feature_service + self._bit_signaling_service = bit_signaling_service + self.consensus_algorithm = consensus_algorithm self.peer_discoveries: list[PeerDiscovery] = [] @@ -261,6 +269,8 @@ def start(self) 
-> None: if self._enable_event_queue: self._event_manager.start(not_none(self.my_peer.id)) + self._bit_signaling_service.start() + self.state = self.NodeState.INITIALIZING self.pubsub.publish(HathorEvents.MANAGER_ON_START) self._event_manager.load_started() @@ -838,12 +848,13 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur parents_any=parents_any, height=height, score=sum_weights(parent_block_metadata.score, weight), + signal_bits=self._bit_signaling_service.generate_signal_bits(block=parent_block) ) def generate_mining_block(self, timestamp: Optional[int] = None, parent_block_hash: Optional[VertexId] = None, data: bytes = b'', address: Optional[Address] = None, - merge_mined: bool = False, signal_bits: int = 0) -> Union[Block, MergeMinedBlock]: + merge_mined: bool = False) -> Union[Block, MergeMinedBlock]: """ Generates a block ready to be mined. The block includes new issued tokens, parents, and the weight. @@ -860,7 +871,6 @@ def generate_mining_block(self, timestamp: Optional[int] = None, merge_mined=merge_mined, address=address or None, # XXX: because we allow b'' for explicit empty output script data=data, - signal_bits=signal_bits ) return block @@ -1090,6 +1100,33 @@ def tx_fully_validated(self, tx: BaseTransaction, *, quiet: bool) -> None: self.wallet.on_new_tx(tx) self.log_new_object(tx, 'new {}', quiet=quiet) + self._log_feature_states(tx) + + def _log_feature_states(self, vertex: BaseTransaction) -> None: + """Log features states for a block. 
Used as part of the Feature Activation Phased Testing.""" + if not settings.FEATURE_ACTIVATION.enable_usage or not isinstance(vertex, Block): + return + + feature_descriptions = self._feature_service.get_bits_description(block=vertex) + state_by_feature = { + feature.value: description.state.value + for feature, description in feature_descriptions.items() + } + + self.log.info( + 'New block accepted with feature activation states', + block_height=vertex.get_height(), + features_states=state_by_feature + ) + + features = [Feature.NOP_FEATURE_1, Feature.NOP_FEATURE_2, Feature.NOP_FEATURE_3] + for feature in features: + self._log_if_feature_is_active(vertex, feature) + + def _log_if_feature_is_active(self, block: Block, feature: Feature) -> None: + """Log if a feature is ACTIVE for a block. Used as part of the Feature Activation Phased Testing.""" + if self._feature_service.is_feature_active(block=block, feature=feature): + self.log.info('Feature is ACTIVE for block', feature=feature.value, block_height=block.get_height()) def listen(self, description: str, use_ssl: Optional[bool] = None) -> None: endpoint = self.connections.listen(description, use_ssl) diff --git a/hathor/mining/block_template.py b/hathor/mining/block_template.py index 007ce89ea..6a77c274e 100644 --- a/hathor/mining/block_template.py +++ b/hathor/mining/block_template.py @@ -34,6 +34,7 @@ class BlockTemplate(NamedTuple): parents_any: list[bytes] # list of extra parents to choose from when there are more options height: int # metadata score: float # metadata + signal_bits: int # signal bits for blocks generated from this template def generate_minimaly_valid_block(self) -> BaseTransaction: """ Generates a block, without any extra information that is valid for this template. 
No random choices.""" @@ -47,8 +48,8 @@ def generate_minimaly_valid_block(self) -> BaseTransaction: def generate_mining_block(self, rng: Random, merge_mined: bool = False, address: Optional[bytes] = None, timestamp: Optional[int] = None, data: Optional[bytes] = None, - storage: Optional[TransactionStorage] = None, include_metadata: bool = False, - signal_bits: int = 0) -> Union[Block, MergeMinedBlock]: + storage: Optional[TransactionStorage] = None, include_metadata: bool = False + ) -> Union[Block, MergeMinedBlock]: """ Generates a block by filling the template with the given options and random parents (if multiple choices). Note that if a timestamp is given it will be coerced into the [timestamp_min, timestamp_max] range. @@ -64,7 +65,7 @@ def generate_mining_block(self, rng: Random, merge_mined: bool = False, address: tx_outputs = [TxOutput(self.reward, output_script)] cls: Union[type['Block'], type['MergeMinedBlock']] = MergeMinedBlock if merge_mined else Block block = cls(outputs=tx_outputs, parents=parents, timestamp=block_timestamp, - data=data or b'', storage=storage, weight=self.weight, signal_bits=signal_bits) + data=data or b'', storage=storage, weight=self.weight, signal_bits=self.signal_bits) if include_metadata: block._metadata = TransactionMetadata(height=self.height, score=self.score) block.get_metadata(use_storage=False) @@ -93,6 +94,7 @@ def to_dict(self) -> dict: 'parents_any': [p.hex() for p in self.parents_any], 'height': self.height, 'score': self.score, + 'signal_bits': self.signal_bits, } @classmethod @@ -108,6 +110,7 @@ def from_dict(cls, data: dict) -> 'BlockTemplate': parents_any=[bytes.fromhex(p) for p in data['parents_any']], height=int(data['height']), score=int(data['score']), + signal_bits=int(data.get('signal_bits', 0)), ) @@ -129,5 +132,4 @@ def generate_mining_block(self, rng: Random, merge_mined: bool = False, address: return self.choose_random_template(rng).generate_mining_block(rng, merge_mined=merge_mined, address=address, 
timestamp=timestamp, data=data, storage=storage or self.storage, - include_metadata=include_metadata, - signal_bits=signal_bits) + include_metadata=include_metadata) diff --git a/hathor/p2p/resources/status.py b/hathor/p2p/resources/status.py index bda60cc1c..7287c6c1a 100644 --- a/hathor/p2p/resources/status.py +++ b/hathor/p2p/resources/status.py @@ -17,8 +17,12 @@ import hathor from hathor.api_util import Resource, set_cors from hathor.cli.openapi_files.register import register_resource +from hathor.conf import HathorSettings +from hathor.p2p.utils import to_serializable_best_blockchain from hathor.util import json_dumpb +settings = HathorSettings() + @register_resource class StatusResource(Resource): @@ -70,6 +74,7 @@ def render_GET(self, request): 'plugins': status, 'warning_flags': [flag.value for flag in conn.warning_flags], 'protocol_version': str(conn.sync_version), + 'peer_best_blockchain': to_serializable_best_blockchain(conn.state.peer_best_blockchain), }) known_peers = [] @@ -89,6 +94,8 @@ def render_GET(self, request): best_block_tips.append({'hash': tx.hash_hex, 'height': meta.height}) best_block = self.manager.tx_storage.get_best_block() + raw_best_blockchain = self.manager.tx_storage.get_n_height_tips(settings.DEFAULT_BEST_BLOCKCHAIN_BLOCKS) + best_blockchain = to_serializable_best_blockchain(raw_best_blockchain) data = { 'server': { @@ -114,11 +121,36 @@ def render_GET(self, request): 'hash': best_block.hash_hex, 'height': best_block.get_metadata().height, }, + 'best_blockchain': best_blockchain, } } return json_dumpb(data) +_openapi_height_info = [59, '0000045de9ac8365c43ccc96222873cb80c340c6c9c8949b56d2e2e51b6a3dbe'] +_openapi_connected_peer = { + 'id': '5578ab3bcaa861fb9d07135b8b167dd230d4487b147be8fd2c94a79bd349d123', + 'app_version': 'Hathor v0.14.0-beta', + 'uptime': 118.37029600143433, + 'address': '192.168.1.1:54321', + 'state': 'READY', + 'last_message': 1539271481, + 'plugins': { + 'node-sync-timestamp': { + 'is_enabled': True, + 
'latest_timestamp': 1685310912, + 'synced_timestamp': 1685310912 + } + }, + 'warning_flags': ['no_entrypoints'], + 'protocol_version': 'sync-v1.1', + 'peer_best_blockchain': [_openapi_height_info] +} +_openapi_connecting_peer = { + 'deferred': '>', # noqa + 'address': '192.168.1.1:54321' +} + StatusResource.openapi = { '/status': { 'x-visibility': 'public', @@ -164,7 +196,7 @@ def render_GET(self, request): }, 'known_peers': [], 'connections': { - 'connected_peers': [], + 'connected_peers': [_openapi_connected_peer], 'handshaking_peers': [ { 'address': '192.168.1.1:54321', @@ -173,28 +205,24 @@ def render_GET(self, request): 'app_version': 'Unknown' } ], - 'connecting_peers': [ - { - 'deferred': ('>'), - 'address': '192.168.1.1:54321' - } - ] + 'connecting_peers': [_openapi_connecting_peer] }, 'dag': { 'first_timestamp': 1539271481, 'latest_timestamp': 1539271483, 'best_block_tips': [ { - 'hash': '000007eb968a6cdf0499e2d033faf1e163e0dc9cf41876acad4d421836972038', # noqa: E501 + 'hash': + '000007eb968a6cdf0499e2d033faf1e163e0dc9cf41876acad4d421836972038', # noqa 'height': 0 } ], 'best_block': { - 'hash': '000007eb968a6cdf0499e2d033faf1e163e0dc9cf41876acad4d421836972038', # noqa: E501 + 'hash': + '000007eb968a6cdf0499e2d033faf1e163e0dc9cf41876acad4d421836972038', # noqa 'height': 0 - } + }, + 'best_blockchain': [_openapi_height_info] } } } diff --git a/hathor/p2p/states/ready.py b/hathor/p2p/states/ready.py index b6813c9c4..19d5bddc0 100644 --- a/hathor/p2p/states/ready.py +++ b/hathor/p2p/states/ready.py @@ -23,8 +23,8 @@ from hathor.p2p.messages import ProtocolMessages from hathor.p2p.peer_id import PeerId from hathor.p2p.states.base import BaseState -from hathor.p2p.states.utils import to_height_info from hathor.p2p.sync_agent import SyncAgent +from hathor.p2p.utils import to_height_info, to_serializable_best_blockchain from hathor.transaction import BaseTransaction from hathor.util import json_dumps, json_loads @@ -233,7 +233,6 @@ def 
handle_get_best_blockchain(self, payload: str) -> None: f'N out of bounds. Valid range: [1, {settings.MAX_BEST_BLOCKCHAIN_BLOCKS}].' ) return - self.protocol.my_peer best_blockchain = self.protocol.node.tx_storage.get_n_height_tips(n_blocks) self.send_best_blockchain(best_blockchain) @@ -241,7 +240,7 @@ def handle_get_best_blockchain(self, payload: str) -> None: def send_best_blockchain(self, best_blockchain: list[HeightInfo]) -> None: """ Send a BEST-BLOCKCHAIN command with a best blockchain of N blocks. """ - serialiable_best_blockchain = [(hi.height, hi.id.hex()) for hi in best_blockchain] + serialiable_best_blockchain = to_serializable_best_blockchain(best_blockchain) self.send_message(ProtocolMessages.BEST_BLOCKCHAIN, json_dumps(serialiable_best_blockchain)) def handle_best_blockchain(self, payload: str) -> None: diff --git a/hathor/p2p/states/utils.py b/hathor/p2p/states/utils.py deleted file mode 100644 index 317077f3b..000000000 --- a/hathor/p2p/states/utils.py +++ /dev/null @@ -1,24 +0,0 @@ -import re - -from hathor.indexes.height_index import HeightInfo - - -def to_height_info(raw: tuple[int, str]) -> HeightInfo: - """ Instantiate HeightInfo from a literal tuple. - """ - if not (isinstance(raw, list) and len(raw) == 2): - raise ValueError(f"block_info_raw must be a tuple with length 3. We got {raw}.") - - height, id = raw - - if not isinstance(id, str): - raise ValueError(f"hash_hex must be a string. We got {id}.") - hash_pattern = r'[a-fA-F\d]{64}' - if not re.match(hash_pattern, id): - raise ValueError(f"hash_hex must be valid. We got {id}.") - if not isinstance(height, int): - raise ValueError(f"height must be an integer. We got {height}.") - if height < 0: - raise ValueError(f"height must greater than or equal to 0. 
We got {height}.") - - return HeightInfo(height, bytes.fromhex(id)) diff --git a/hathor/p2p/sync_v1/agent.py b/hathor/p2p/sync_v1/agent.py index 7ec670065..c2f90e0d5 100644 --- a/hathor/p2p/sync_v1/agent.py +++ b/hathor/p2p/sync_v1/agent.py @@ -31,7 +31,8 @@ from hathor.transaction import BaseTransaction from hathor.transaction.base_transaction import tx_or_block_from_bytes from hathor.transaction.storage.exceptions import TransactionDoesNotExist -from hathor.util import Reactor, json_dumps, json_loads, verified_cast +from hathor.util import Reactor, json_dumps, json_loads +from hathor.utils.zope import asserted_cast settings = HathorSettings() logger = get_logger() @@ -60,8 +61,8 @@ def __init__(self, node_sync: 'NodeSyncTimestamp'): self.node_sync = node_sync self.protocol: 'HathorProtocol' = node_sync.protocol assert self.protocol.transport is not None - self.consumer = verified_cast(IConsumer, self.protocol.transport) - + consumer = asserted_cast(IConsumer, self.protocol.transport) + self.consumer = consumer self.is_running: bool = False self.is_producing: bool = False diff --git a/hathor/p2p/sync_v2/manager.py b/hathor/p2p/sync_v2/agent.py similarity index 100% rename from hathor/p2p/sync_v2/manager.py rename to hathor/p2p/sync_v2/agent.py diff --git a/hathor/p2p/sync_v2/factory.py b/hathor/p2p/sync_v2/factory.py index 40b2b8294..defb37283 100644 --- a/hathor/p2p/sync_v2/factory.py +++ b/hathor/p2p/sync_v2/factory.py @@ -17,7 +17,7 @@ from hathor.p2p.manager import ConnectionsManager from hathor.p2p.sync_agent import SyncAgent from hathor.p2p.sync_factory import SyncAgentFactory -from hathor.p2p.sync_v2.manager import NodeBlockSync +from hathor.p2p.sync_v2.agent import NodeBlockSync from hathor.util import Reactor if TYPE_CHECKING: diff --git a/hathor/p2p/sync_v2/mempool.py b/hathor/p2p/sync_v2/mempool.py index da7f5d040..97020eff1 100644 --- a/hathor/p2p/sync_v2/mempool.py +++ b/hathor/p2p/sync_v2/mempool.py @@ -21,7 +21,7 @@ from hathor.transaction import 
BaseTransaction if TYPE_CHECKING: - from hathor.p2p.sync_v2.manager import NodeBlockSync + from hathor.p2p.sync_v2.agent import NodeBlockSync logger = get_logger() diff --git a/hathor/p2p/sync_v2/streamers.py b/hathor/p2p/sync_v2/streamers.py index 968741c65..191cb9372 100644 --- a/hathor/p2p/sync_v2/streamers.py +++ b/hathor/p2p/sync_v2/streamers.py @@ -21,11 +21,11 @@ from hathor.transaction import BaseTransaction, Block, Transaction from hathor.transaction.storage.traversal import BFSOrderWalk -from hathor.util import verified_cast +from hathor.utils.zope import asserted_cast if TYPE_CHECKING: from hathor.p2p.protocol import HathorProtocol - from hathor.p2p.sync_v2.manager import NodeBlockSync + from hathor.p2p.sync_v2.agent import NodeBlockSync logger = get_logger() @@ -60,7 +60,8 @@ def __init__(self, node_sync: 'NodeBlockSync', *, limit: int = DEFAULT_STREAMING self.node_sync = node_sync self.protocol: 'HathorProtocol' = node_sync.protocol assert self.protocol.transport is not None - self.consumer = verified_cast(IConsumer, self.protocol.transport) + consumer = asserted_cast(IConsumer, self.protocol.transport) + self.consumer = consumer self.counter = 0 self.limit = limit diff --git a/hathor/p2p/utils.py b/hathor/p2p/utils.py index e9c778807..745ab05ee 100644 --- a/hathor/p2p/utils.py +++ b/hathor/p2p/utils.py @@ -13,6 +13,7 @@ # limitations under the License. import datetime +import re from typing import Any, Generator, Optional from urllib.parse import parse_qs, urlparse @@ -28,6 +29,7 @@ from twisted.internet.interfaces import IAddress from hathor.conf import HathorSettings +from hathor.indexes.height_index import HeightInfo from hathor.p2p.peer_discovery import DNSPeerDiscovery from hathor.transaction.genesis import GENESIS_HASH @@ -200,3 +202,30 @@ def format_address(addr: IAddress) -> str: return f'{host}:{port}' else: return str(addr) + + +def to_height_info(raw: tuple[int, str]) -> HeightInfo: + """ Instantiate HeightInfo from a literal tuple. 
+ """ + if not (isinstance(raw, list) and len(raw) == 2): + raise ValueError(f"height_info_raw must be a tuple with length 2. We got {raw}.") + + height, id = raw + + if not isinstance(id, str): + raise ValueError(f"id (hash) must be a string. We got {id}.") + hash_pattern = r'[a-fA-F\d]{64}' + if not re.match(hash_pattern, id): + raise ValueError(f"id (hash) must be valid. We got {id}.") + if not isinstance(height, int): + raise ValueError(f"height must be an integer. We got {height}.") + if height < 0: + raise ValueError(f"height must be greater than or equal to 0. We got {height}.") + + return HeightInfo(height, bytes.fromhex(id)) + + +def to_serializable_best_blockchain(best_blockchain: list[HeightInfo]) -> list[tuple[int, str]]: + """ Converts the list of HeightInfo to a tuple list that can be serialized to json afterwards. + """ + return [(hi.height, hi.id.hex()) for hi in best_blockchain] diff --git a/hathor/pubsub.py b/hathor/pubsub.py index e410c3fac..b9c5506c3 100644 --- a/hathor/pubsub.py +++ b/hathor/pubsub.py @@ -14,11 +14,13 @@ from collections import defaultdict, deque from enum import Enum -from typing import TYPE_CHECKING, Any, Callable, cast +from typing import TYPE_CHECKING, Any, Callable from twisted.internet.interfaces import IReactorFromThreads +from twisted.python.threadable import isInIOThread -from hathor.util import Reactor, ReactorThread +from hathor.util import Reactor +from hathor.utils.zope import verified_cast if TYPE_CHECKING: from hathor.transaction import BaseTransaction, Block @@ -187,7 +189,7 @@ def unsubscribe(self, key: HathorEvents, fn: PubSubCallable) -> None: if fn in self._subscribers[key]: self._subscribers[key].remove(fn) - def _call_next(self): + def _call_next(self) -> None: """Execute next call if it exists.""" if not self.queue: return @@ -196,19 +198,17 @@ def _call_next(self): if self.queue: self._schedule_call_next() - def _schedule_call_next(self): + def _schedule_call_next(self) -> None: """Schedule next call's 
execution.""" - reactor_thread = ReactorThread.get_current_thread(self.reactor) - if reactor_thread == ReactorThread.MAIN_THREAD: - self.reactor.callLater(0, self._call_next) - elif reactor_thread == ReactorThread.NOT_MAIN_THREAD: - # XXX: does this always hold true? an assert could be tricky because it is a zope.interface - reactor = cast(IReactorFromThreads, self.reactor) + assert self.reactor.running + + if not isInIOThread() and (threaded_reactor := verified_cast(IReactorFromThreads, self.reactor)): # We're taking a conservative approach, since not all functions might need to run # on the main thread [yan 2019-02-20] - reactor.callFromThread(self._call_next) - else: - raise NotImplementedError + threaded_reactor.callFromThread(self._call_next) + return + + self.reactor.callLater(0, self._call_next) def publish(self, key: HathorEvents, **kwargs: Any) -> None: """Publish a new event. @@ -219,11 +219,9 @@ def publish(self, key: HathorEvents, **kwargs: Any) -> None: :param **kwargs: Named arguments to be given to the functions that will be called with this event. :type **kwargs: dict """ - reactor_thread = ReactorThread.get_current_thread(self.reactor) - args = EventArguments(**kwargs) for fn in self._subscribers[key]: - if reactor_thread == ReactorThread.NOT_RUNNING: + if not self.reactor.running: fn(key, args) else: is_empty = bool(not self.queue) diff --git a/hathor/reactor/__init__.py b/hathor/reactor/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/hathor/reactor/reactor.py b/hathor/reactor/reactor.py new file mode 100644 index 000000000..10bda8e98 --- /dev/null +++ b/hathor/reactor/reactor.py @@ -0,0 +1,31 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import cast + +from twisted.internet import reactor as twisted_reactor +from twisted.internet.interfaces import IReactorCore, IReactorTCP, IReactorTime +from zope.interface.verify import verifyObject + +from hathor.reactor.reactor_protocol import ReactorProtocol + +assert verifyObject(IReactorTime, twisted_reactor) is True +assert verifyObject(IReactorCore, twisted_reactor) is True +assert verifyObject(IReactorTCP, twisted_reactor) is True + +""" +This variable is the global reactor that should be imported to use the Twisted reactor. +It's cast to ReactorProtocol, our own type that stubs the necessary Twisted zope interfaces, to aid typing. +""" +reactor = cast(ReactorProtocol, twisted_reactor) diff --git a/hathor/reactor/reactor_core_protocol.py b/hathor/reactor/reactor_core_protocol.py new file mode 100644 index 000000000..e3d47e2ca --- /dev/null +++ b/hathor/reactor/reactor_core_protocol.py @@ -0,0 +1,74 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from abc import abstractmethod +from typing import TYPE_CHECKING, Any, Callable, Optional, Protocol, Sequence + +from twisted.internet.interfaces import IReactorCore +from zope.interface import implementer + +if TYPE_CHECKING: + from twisted.internet.defer import Deferred + + +@implementer(IReactorCore) +class ReactorCoreProtocol(Protocol): + """ + A Python protocol that stubs Twisted's IReactorCore interface. + """ + + running: bool + + @abstractmethod + def resolve(self, name: str, timeout: Sequence[int]) -> 'Deferred[str]': + raise NotImplementedError + + @abstractmethod + def run(self) -> None: + raise NotImplementedError + + @abstractmethod + def stop(self) -> None: + raise NotImplementedError + + @abstractmethod + def crash(self) -> None: + raise NotImplementedError + + @abstractmethod + def iterate(self, delay: float) -> None: + raise NotImplementedError + + @abstractmethod + def fireSystemEvent(self, eventType: str) -> None: + raise NotImplementedError + + @abstractmethod + def addSystemEventTrigger( + self, + phase: str, + eventType: str, + callable: Callable[..., Any], + *args: object, + **kwargs: object, + ) -> Any: + raise NotImplementedError + + @abstractmethod + def removeSystemEventTrigger(self, triggerID: Any) -> None: + raise NotImplementedError + + @abstractmethod + def callWhenRunning(self, callable: Callable[..., Any], *args: object, **kwargs: object) -> Optional[Any]: + raise NotImplementedError diff --git a/hathor/reactor/reactor_protocol.py b/hathor/reactor/reactor_protocol.py new file mode 100644 index 000000000..7c301d052 --- /dev/null +++ b/hathor/reactor/reactor_protocol.py @@ -0,0 +1,31 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Protocol + +from hathor.reactor.reactor_core_protocol import ReactorCoreProtocol +from hathor.reactor.reactor_tcp_protocol import ReactorTCPProtocol +from hathor.reactor.reactor_time_protocol import ReactorTimeProtocol + + +class ReactorProtocol( + ReactorCoreProtocol, + ReactorTimeProtocol, + ReactorTCPProtocol, + Protocol, +): + """ + A Python protocol that represents the intersection of Twisted's IReactorCore+IReactorTime+IReactorTCP interfaces. + """ + pass diff --git a/hathor/reactor/reactor_tcp_protocol.py b/hathor/reactor/reactor_tcp_protocol.py new file mode 100644 index 000000000..f8e1d671d --- /dev/null +++ b/hathor/reactor/reactor_tcp_protocol.py @@ -0,0 +1,51 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from abc import abstractmethod +from typing import TYPE_CHECKING, Optional, Protocol + +from twisted.internet.interfaces import IReactorTCP +from zope.interface import implementer + +if TYPE_CHECKING: + from twisted.internet.interfaces import IConnector, IListeningPort + from twisted.internet.protocol import ClientFactory, ServerFactory + + +@implementer(IReactorTCP) +class ReactorTCPProtocol(Protocol): + """ + A Python protocol that stubs Twisted's IReactorTCP interface. + """ + + @abstractmethod + def listenTCP( + self, + port: int, + factory: 'ServerFactory', + backlog: int = 0, + interface: str = '' + ) -> 'IListeningPort': + raise NotImplementedError + + @abstractmethod + def connectTCP( + self, + host: str, + port: int, + factory: 'ClientFactory', + timeout: float, + bindAddress: Optional[tuple[str, int]], + ) -> 'IConnector': + raise NotImplementedError diff --git a/hathor/reactor/reactor_time_protocol.py b/hathor/reactor/reactor_time_protocol.py new file mode 100644 index 000000000..ad9a748f6 --- /dev/null +++ b/hathor/reactor/reactor_time_protocol.py @@ -0,0 +1,41 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from abc import abstractmethod +from typing import TYPE_CHECKING, Any, Callable, Protocol, Sequence + +from twisted.internet.interfaces import IReactorTime +from zope.interface import implementer + +if TYPE_CHECKING: + from twisted.internet.interfaces import IDelayedCall + + +@implementer(IReactorTime) +class ReactorTimeProtocol(Protocol): + """ + A Python protocol that stubs Twisted's IReactorTime interface. + """ + + @abstractmethod + def seconds(self) -> float: + raise NotImplementedError + + @abstractmethod + def callLater(self, delay: float, callable: Callable[..., Any], *args: object, **kwargs: object) -> 'IDelayedCall': + raise NotImplementedError + + @abstractmethod + def getDelayedCalls(self) -> Sequence['IDelayedCall']: + raise NotImplementedError diff --git a/hathor/simulator/clock.py b/hathor/simulator/clock.py index db5f466e4..3e0aeb4f5 100644 --- a/hathor/simulator/clock.py +++ b/hathor/simulator/clock.py @@ -94,3 +94,10 @@ class MemoryReactorHeapClock(MemoryReactor, HeapClock): def __init__(self): MemoryReactor.__init__(self) HeapClock.__init__(self) + + def run(self): + """ + We have to override MemoryReactor.run() because the original Twisted implementation weirdly calls stop() inside + run(), and we need the reactor running during our tests. 
+ """ + self.running = True diff --git a/hathor/simulator/fake_connection.py b/hathor/simulator/fake_connection.py index 9db893559..5f7581611 100644 --- a/hathor/simulator/fake_connection.py +++ b/hathor/simulator/fake_connection.py @@ -101,6 +101,16 @@ def is_both_synced(self) -> bool: if not state1_is_synced or not state2_is_synced: self.log.debug('peer not synced', peer1_synced=state1_is_synced, peer2_synced=state2_is_synced) return False + [best_block_info1] = state1.protocol.node.tx_storage.get_n_height_tips(1) + [best_block_info2] = state2.protocol.node.tx_storage.get_n_height_tips(1) + if best_block_info1.id != best_block_info2.id: + self.log.debug('best block is different') + return False + tips1 = {i.data for i in state1.protocol.node.tx_storage.get_tx_tips()} + tips2 = {i.data for i in state2.protocol.node.tx_storage.get_tx_tips()} + if tips1 != tips2: + self.log.debug('tx tips are different') + return False return True def can_step(self) -> bool: diff --git a/hathor/simulator/miner/geometric_miner.py b/hathor/simulator/miner/geometric_miner.py index be8d71cc3..53a0b8c0d 100644 --- a/hathor/simulator/miner/geometric_miner.py +++ b/hathor/simulator/miner/geometric_miner.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import math from typing import TYPE_CHECKING, Optional from hathor.conf import HathorSettings @@ -50,6 +51,7 @@ def __init__( self._signal_bits = signal_bits or [] self._block: Optional[Block] = None self._blocks_found: int = 0 + self._blocks_before_pause: float = math.inf def _on_new_tx(self, key: HathorEvents, args: 'EventArguments') -> None: """ Called when a new tx or block is received. 
It updates the current mining to the @@ -75,15 +77,23 @@ def _generate_mining_block(self) -> 'Block': except IndexError: signal_bits = 0 - return self._manager.generate_mining_block(signal_bits=signal_bits) + block = self._manager.generate_mining_block() + block.signal_bits = signal_bits + + return block def _schedule_next_block(self): + if self._blocks_before_pause <= 0: + self._delayed_call = None + return + if self._block: self._block.nonce = self._rng.getrandbits(32) self._block.update_hash() self.log.debug('randomized step: found new block', hash=self._block.hash_hex, nonce=self._block.nonce) self._manager.propagate_tx(self._block, fails_silently=False) self._blocks_found += 1 + self._blocks_before_pause -= 1 self._block = None if self._manager.can_start_mining(): @@ -107,3 +117,16 @@ def _schedule_next_block(self): def get_blocks_found(self) -> int: return self._blocks_found + + def pause_after_exactly(self, *, n_blocks: int) -> None: + """ + Configure the miner to pause mining blocks after exactly `n_blocks` are propagated. If called more than once, + will unpause the miner and pause again according to the new argument. + + Use this instead of the `StopAfterNMinedBlocks` trigger if you need "exactly N blocks" behavior, instead of + "at least N blocks". 
+ """ + self._blocks_before_pause = n_blocks + + if not self._delayed_call: + self._delayed_call = self._clock.callLater(0, self._schedule_next_block) diff --git a/hathor/simulator/simulator.py b/hathor/simulator/simulator.py index 4e523fd02..9ecc805df 100644 --- a/hathor/simulator/simulator.py +++ b/hathor/simulator/simulator.py @@ -25,7 +25,7 @@ from hathor.daa import TestMode, _set_test_mode from hathor.manager import HathorManager from hathor.p2p.peer_id import PeerId -from hathor.simulator.clock import HeapClock +from hathor.simulator.clock import HeapClock, MemoryReactorHeapClock from hathor.simulator.miner.geometric_miner import GeometricMiner from hathor.simulator.tx_generator import RandomTransactionGenerator from hathor.transaction.genesis import _get_genesis_transactions_unsafe @@ -118,7 +118,7 @@ def __init__(self, seed: Optional[int] = None): self.rng = Random(self.seed) self.settings = HathorSettings() self._network = 'testnet' - self._clock = HeapClock() + self._clock = MemoryReactorHeapClock() self._peers: OrderedDict[str, HathorManager] = OrderedDict() self._connections: list['FakeConnection'] = [] self._started = False @@ -178,6 +178,7 @@ def create_artifacts(self, builder: Optional[Builder] = None) -> BuildArtifacts: .build() artifacts.manager.start() + self._clock.run() self.run_to_completion() # Don't use it anywhere else. It is unsafe to generate mnemonic words like this. diff --git a/hathor/simulator/trigger.py b/hathor/simulator/trigger.py index 2d54831f5..5745523ce 100644 --- a/hathor/simulator/trigger.py +++ b/hathor/simulator/trigger.py @@ -31,7 +31,12 @@ def should_stop(self) -> bool: class StopAfterNMinedBlocks(Trigger): - """Stop the simulation after `miner` finds N blocks. Note that these blocks might be orphan.""" + """ + Stop the simulation after `miner` finds at least N blocks. Note that these blocks might be orphan. 
+ + Use `miner.pause_after_exactly()` instead of this trigger if you need "exactly N blocks" behavior, instead of + "at least N blocks". + """ def __init__(self, miner: 'AbstractMiner', *, quantity: int) -> None: self.miner = miner self.quantity = quantity diff --git a/hathor/stratum/stratum.py b/hathor/stratum/stratum.py index 2c47f1e5c..bc3d79492 100644 --- a/hathor/stratum/stratum.py +++ b/hathor/stratum/stratum.py @@ -29,7 +29,7 @@ from twisted.internet import task from twisted.internet.defer import Deferred from twisted.internet.interfaces import IAddress, IDelayedCall -from twisted.internet.protocol import Factory, connectionDone +from twisted.internet.protocol import ServerFactory, connectionDone from twisted.protocols.basic import LineReceiver from twisted.python.failure import Failure @@ -717,7 +717,7 @@ def get_stats(self) -> MinerStatistics: ) -class StratumFactory(Factory): +class StratumFactory(ServerFactory): """ Twisted factory of server Hathor Stratum protocols. Interfaces with nodes to keep mining jobs up to date and to submit successful ones. diff --git a/hathor/sysctl/exception.py b/hathor/sysctl/exception.py index 2fb05f862..a70f19f33 100644 --- a/hathor/sysctl/exception.py +++ b/hathor/sysctl/exception.py @@ -26,3 +26,7 @@ class SysctlReadOnlyEntry(SysctlException): class SysctlWriteOnlyEntry(SysctlException): pass + + +class SysctlRunnerException(SysctlException): + pass diff --git a/hathor/sysctl/factory.py b/hathor/sysctl/factory.py index b9b1101e7..aedd1bc00 100644 --- a/hathor/sysctl/factory.py +++ b/hathor/sysctl/factory.py @@ -12,19 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TYPE_CHECKING - from twisted.internet.protocol import Factory from hathor.sysctl.protocol import SysctlProtocol - -if TYPE_CHECKING: - from hathor.sysctl.sysctl import Sysctl +from hathor.sysctl.runner import SysctlRunner class SysctlFactory(Factory): - def __init__(self, root: 'Sysctl') -> None: - self.root = root + def __init__(self, runner: SysctlRunner) -> None: + self.runner = runner def buildProtocol(self, addr): - return SysctlProtocol(self.root) + return SysctlProtocol(self.runner) diff --git a/hathor/sysctl/protocol.py b/hathor/sysctl/protocol.py index ac60965f2..e717285ec 100644 --- a/hathor/sysctl/protocol.py +++ b/hathor/sysctl/protocol.py @@ -13,85 +13,69 @@ # limitations under the License. import inspect -import json -from typing import TYPE_CHECKING, Any, Callable, Optional +from typing import Callable, Optional from pydantic import ValidationError from twisted.protocols.basic import LineReceiver -from hathor.sysctl.exception import SysctlEntryNotFound, SysctlException, SysctlReadOnlyEntry, SysctlWriteOnlyEntry - -if TYPE_CHECKING: - from hathor.sysctl.sysctl import Sysctl +from hathor.sysctl.exception import ( + SysctlEntryNotFound, + SysctlException, + SysctlReadOnlyEntry, + SysctlRunnerException, + SysctlWriteOnlyEntry, +) +from hathor.sysctl.runner import SysctlRunner class SysctlProtocol(LineReceiver): delimiter = b'\n' - def __init__(self, root: 'Sysctl') -> None: - self.root = root + def __init__(self, runner: SysctlRunner) -> None: + self.runner = runner def lineReceived(self, raw: bytes) -> None: try: line = raw.decode('utf-8').strip() except UnicodeDecodeError: self.sendError('command is not utf-8 valid') + if line.startswith('!help'): _, _, path = line.partition(' ') self.help(path) return - elif line == '!backup': + elif line.startswith('!backup'): self.backup() return - head, separator, tail = line.partition('=') - head = head.strip() - tail = tail.strip() - if separator == '=': - self.set(head, tail) - else: - 
self.get(head) - - def sendError(self, msg: str) -> None: - """Send an error message to the client. Used when a command fails.""" - self.sendLine(f'[error] {msg}'.encode('utf-8')) - - def set(self, path: str, value_str: str) -> None: - """Run a `set` command in sysctl.""" - try: - value = self._deserialize(value_str) - except json.JSONDecodeError: - self.sendError('value: wrong format') - return try: - self.root.set(path, value) + feedback = self.runner.run(line) + if feedback: + self.sendLine(feedback) except SysctlEntryNotFound: + path, _, _ = self.runner.get_line_parts(line) self.sendError(f'{path} not found') except SysctlReadOnlyEntry: + path, _, _ = self.runner.get_line_parts(line) self.sendError(f'cannot write to {path}') + except SysctlWriteOnlyEntry: + path, _, _ = self.runner.get_line_parts(line) + self.sendError(f'cannot read from {path}') except SysctlException as e: self.sendError(str(e)) except ValidationError as e: self.sendError(str(e)) - except TypeError as e: + except SysctlRunnerException as e: self.sendError(str(e)) - def get(self, path: str) -> None: - """Run a `get` command in sysctl.""" - try: - value = self.root.get(path) - except SysctlEntryNotFound: - self.sendError(f'{path} not found') - except SysctlWriteOnlyEntry: - self.sendError(f'cannot read from {path}') - else: - output = self._serialize(value) - self.sendLine(output.encode('utf-8')) + def sendError(self, msg: str) -> None: + """Send an error message to the client. 
Used when a command fails.""" + self.sendLine(f'[error] {msg}'.encode('utf-8')) def backup(self) -> None: """Run a `backup` command, sending all parameters to the client.""" - for key, value in self.root.get_all(): - output = f'{key}={self._serialize(value)}' + for key, value in self.runner.root.get_all(): + output = f'{key}={self.runner.serialize(value)}' self.sendLine(output.encode('utf-8')) def help(self, path: str) -> None: @@ -100,7 +84,7 @@ def help(self, path: str) -> None: self._send_all_commands() return try: - cmd = self.root.get_command(path) + cmd = self.runner.root.get_command(path) except SysctlEntryNotFound: self.sendError(f'{path} not found') return @@ -112,29 +96,18 @@ def help(self, path: str) -> None: self.sendLine('\n'.join(output).encode('utf-8')) def _send_all_commands(self) -> None: - all_paths = list(self.root.get_all_paths()) + all_paths = list(self.runner.root.get_all_paths()) for path in sorted(all_paths): self.sendLine(path.encode('utf-8')) - def _serialize(self, value: Any) -> str: - """Serialize the return of a sysctl getter.""" - output: str - if isinstance(value, tuple): - parts = (json.dumps(x) for x in value) - output = ', '.join(parts) - else: - output = json.dumps(value) + def _get_all_commands(self) -> list[str]: + """Get a list of all commands availale in the sysctl.""" + all_paths = list(self.runner.root.get_all_paths()) + output: list[str] = [] + for path in sorted(all_paths): + output.append(path) return output - def _deserialize(self, value_str: str) -> Any: - """Deserialize a value sent by the client.""" - if len(value_str) == 0: - return () - parts = [x.strip() for x in value_str.split(',')] - if len(parts) > 1: - return tuple(json.loads(x) for x in parts) - return json.loads(value_str) - def _get_method_help(self, method_name: str, method: Optional[Callable]) -> list[str]: """Return a list of strings with the help for `method`.""" if method is None: diff --git a/hathor/sysctl/runner.py b/hathor/sysctl/runner.py new file 
mode 100644 index 000000000..85850b2db --- /dev/null +++ b/hathor/sysctl/runner.py @@ -0,0 +1,81 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +from typing import TYPE_CHECKING, Any + +from hathor.sysctl.exception import SysctlRunnerException + +if TYPE_CHECKING: + from hathor.sysctl.sysctl import Sysctl + + +class SysctlRunner: + """ Encapsulates the Sysctl to decouple it from the SyctlProtocol. + """ + + def __init__(self, root: 'Sysctl') -> None: + self.root = root + + def run(self, line: str) -> bytes: + """Receives a string line, parses, interprets, acts over the Sysctl, + and returns an UTF-8 encoding data as feedback. 
+ """ + if not line: + raise SysctlRunnerException('line cannot be empty or None') + + head, separator, tail = self.get_line_parts(line) + if separator == '=': + return self._set(head, tail) + else: + return self._get(head) + + def _set(self, path: str, value_str: str) -> bytes: + """Run a `set` command in sysctl, and return and empty feedback.""" + try: + value = self.deserialize(value_str) + except json.JSONDecodeError: + raise SysctlRunnerException('value: wrong format') + + self.root.set(path, value) + return b'' + + def _get(self, path: str) -> bytes: + """Run a `get` command in sysctl.""" + value = self.root.get(path) + return self.serialize(value).encode('utf-8') + + def get_line_parts(self, line: str) -> tuple[str, ...]: + """Get line parts and return a tuple with head, separator, tail.""" + head, separator, tail = line.partition('=') + head = head.strip() + tail = tail.strip() + return (head, separator, tail) + + def serialize(self, value: Any) -> str: + """Serialize the return of a sysctl getter.""" + if isinstance(value, tuple): + parts = (json.dumps(x) for x in value) + return ', '.join(parts) + else: + return json.dumps(value) + + def deserialize(self, value_str: str) -> Any: + """Deserialize a value sent by the client.""" + if len(value_str) == 0: + return () + parts = [x.strip() for x in value_str.split(',')] + if len(parts) > 1: + return tuple(json.loads(x) for x in parts) + return json.loads(value_str) diff --git a/hathor/transaction/aux_pow.py b/hathor/transaction/aux_pow.py index 795d9a9f7..0a18ee2aa 100644 --- a/hathor/transaction/aux_pow.py +++ b/hathor/transaction/aux_pow.py @@ -58,8 +58,9 @@ def verify(self, _base_block_hash: bytes) -> None: raise AuxPowNoMagicError('cannot find MAGIC_NUMBER') if magic_index < len(self.coinbase_head) - len(MAGIC_NUMBER): raise AuxPowUnexpectedMagicError('unexpected MAGIC_NUMBER') - if len(self.merkle_path) > MAX_MERKLE_PATH_LENGTH: - raise AuxPowLongMerklePathError('`merkle_path` too long') + 
merkle_path_length = len(self.merkle_path) + if merkle_path_length > MAX_MERKLE_PATH_LENGTH: + raise AuxPowLongMerklePathError(f'merkle_path too long: {merkle_path_length} > {MAX_MERKLE_PATH_LENGTH}') def __bytes__(self) -> bytes: """ Convert to byte representation. diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index 10c4dfd0c..ef8350fde 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -1035,6 +1035,7 @@ def to_json(self, decode_script: bool = False, include_metadata: bool = False) - data['timestamp'] = self.timestamp data['version'] = int(self.version) data['weight'] = self.weight + data['signal_bits'] = self.signal_bits data['parents'] = [] for parent in self.parents: diff --git a/hathor/transaction/block.py b/hathor/transaction/block.py index 699c6e763..08fe7c69a 100644 --- a/hathor/transaction/block.py +++ b/hathor/transaction/block.py @@ -35,6 +35,7 @@ WeightError, ) from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len +from hathor.util import not_none from hathor.utils.int import get_bit_list if TYPE_CHECKING: @@ -112,8 +113,13 @@ def calculate_min_height(self) -> int: """ assert self.storage is not None # maximum min-height of any parent tx - return max((self.storage.get_transaction(tx).get_metadata().min_height for tx in self.get_tx_parents()), - default=0) + min_height = 0 + for tx_hash in self.get_tx_parents(): + tx = self.storage.get_transaction(tx_hash) + tx_min_height = tx.get_metadata().min_height + min_height = max(min_height, not_none(tx_min_height)) + + return min_height def calculate_feature_activation_bit_counts(self) -> list[int]: """ @@ -350,6 +356,7 @@ def verify_height(self) -> None: """Validate that the block height is enough to confirm all transactions being confirmed.""" meta = self.get_metadata() assert meta.height is not None + assert meta.min_height is not None if meta.height < meta.min_height: raise 
RewardLocked(f'Block needs {meta.min_height} height but has {meta.height}') diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index bd089dcb1..b702838e3 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -459,18 +459,19 @@ def _validate_block_height_metadata(self, tx: BaseTransaction) -> None: def remove_transaction(self, tx: BaseTransaction) -> None: """Remove the tx. - :param tx: Trasaction to be removed + :param tx: Transaction to be removed """ if self.indexes is not None: self.del_from_indexes(tx, remove_all=True, relax_assert=True) def remove_transactions(self, txs: list[BaseTransaction]) -> None: - """Will remove all of the transactions on the list from the database. + """Will remove all the transactions on the list from the database. Special notes: - will refuse and raise an error when removing all transactions would leave dangling transactions, that is, - transactions without existing parent + transactions without existing parent. 
That is, it expects the `txs` list to include all children of deleted + txs, from both the confirmation and funds DAGs - inputs's spent_outputs should not have any of the transactions being removed as spending transactions, this method will update and save those transaction's metadata - parent's children metadata will be updated to reflect the removals @@ -486,6 +487,8 @@ def remove_transactions(self, txs: list[BaseTransaction]) -> None: for parent in set(tx.parents) - txset: parents_to_update[parent].append(tx.hash) dangling_children.update(set(tx_meta.children) - txset) + for spending_txs in tx_meta.spent_outputs.values(): + dangling_children.update(set(spending_txs) - txset) for tx_input in tx.inputs: spent_tx = tx.get_spent_tx(tx_input) spent_tx_meta = spent_tx.get_metadata() @@ -514,7 +517,9 @@ def transaction_exists(self, hash_bytes: bytes) -> bool: def compare_bytes_with_local_tx(self, tx: BaseTransaction) -> bool: """Compare byte-per-byte `tx` with the local transaction.""" assert tx.hash is not None - local_tx = self.get_transaction(tx.hash) + # XXX: we have to accept any scope because we only want to know what bytes we have stored + with tx_allow_context(self, allow_scope=TxAllowScope.ALL): + local_tx = self.get_transaction(tx.hash) local_tx_bytes = bytes(local_tx) tx_bytes = bytes(tx) if tx_bytes == local_tx_bytes: @@ -1065,13 +1070,18 @@ def iter_mempool_from_best_index(self) -> Iterator[Transaction]: else: yield from self.iter_mempool_from_tx_tips() - def compute_transactions_that_became_invalid(self) -> list[BaseTransaction]: - """ This method will look for transactions in the mempool that have became invalid due to the reward lock. + def compute_transactions_that_became_invalid(self, new_best_height: int) -> list[BaseTransaction]: + """ This method will look for transactions in the mempool that have become invalid due to the reward lock. 
+ It compares each tx's `min_height` to the `new_best_height`, accounting for the fact that the tx can be + confirmed by the next block. """ from hathor.transaction.validation_state import ValidationState to_remove: list[BaseTransaction] = [] for tx in self.iter_mempool_from_best_index(): - if tx.is_spent_reward_locked(): + tx_min_height = tx.get_metadata().min_height + assert tx_min_height is not None + # We use +1 here because a tx is valid if it can be confirmed by the next block + if new_best_height + 1 < tx_min_height: tx.set_validation(ValidationState.INVALID) to_remove.append(tx) return to_remove diff --git a/hathor/transaction/storage/tx_allow_scope.py b/hathor/transaction/storage/tx_allow_scope.py index 34031aee1..90f9899e2 100644 --- a/hathor/transaction/storage/tx_allow_scope.py +++ b/hathor/transaction/storage/tx_allow_scope.py @@ -35,6 +35,7 @@ class TxAllowScope(Flag): VALID = auto() PARTIAL = auto() INVALID = auto() + ALL = VALID | PARTIAL | INVALID def is_allowed(self, tx: BaseTransaction) -> bool: """True means it is allowed to be used in the storage (as argument or as return), False means not allowed.""" diff --git a/hathor/transaction/transaction.py b/hathor/transaction/transaction.py index 712ef563e..90150b7a0 100644 --- a/hathor/transaction/transaction.py +++ b/hathor/transaction/transaction.py @@ -43,6 +43,7 @@ ) from hathor.transaction.util import VerboseCallback, get_deposit_amount, get_withdraw_amount, unpack, unpack_len from hathor.types import TokenUid, VertexId +from hathor.util import not_none if TYPE_CHECKING: from hathor.transaction.storage import TransactionStorage # noqa: F401 @@ -147,11 +148,11 @@ def _calculate_inherited_min_height(self) -> int: iter_parents = map(self.storage.get_transaction, self.get_tx_parents()) iter_inputs = map(self.get_spent_tx, self.inputs) for tx in chain(iter_parents, iter_inputs): - min_height = max(min_height, tx.get_metadata().min_height) + min_height = max(min_height, 
not_none(tx.get_metadata().min_height)) return min_height def _calculate_my_min_height(self) -> int: - """ Calculates min height derived from own spent rewards""" + """ Calculates min height derived from own spent block rewards""" min_height = 0 for blk in self.iter_spent_rewards(): min_height = max(min_height, blk.get_height() + settings.REWARD_SPEND_MIN_BLOCKS + 1) @@ -573,17 +574,20 @@ def verify_inputs(self, *, skip_script: bool = False) -> None: spent_outputs.add(key) def verify_reward_locked(self) -> None: - """Will raise `RewardLocked` if any reward is spent before the best block height is enough.""" + """Will raise `RewardLocked` if any reward is spent before the best block height is enough, considering only + the block rewards spent by this tx itself, and not the inherited `min_height`.""" info = self.get_spent_reward_locked_info() if info is not None: raise RewardLocked(f'Reward {info.block_hash.hex()} still needs {info.blocks_needed} to be unlocked.') def is_spent_reward_locked(self) -> bool: - """ Verify whether any spent reward is currently locked.""" + """ Check whether any spent reward is currently locked, considering only the block rewards spent by this tx + itself, and not the inherited `min_height`""" return self.get_spent_reward_locked_info() is not None def get_spent_reward_locked_info(self) -> Optional[RewardLockedInfo]: - """ Same verification as in `is_spent_reward_locked`, but returns extra information or None for False.""" + """Check if any input block reward is locked, returning the locked information if any, or None if they are all + unlocked.""" for blk in self.iter_spent_rewards(): assert blk.hash is not None needed_height = self._spent_reward_needed_height(blk) @@ -592,7 +596,7 @@ def get_spent_reward_locked_info(self) -> Optional[RewardLockedInfo]: return None def _spent_reward_needed_height(self, block: Block) -> int: - """ Returns height still needed to unlock this reward: 0 means it's unlocked.""" + """ Returns height still needed 
to unlock this `block` reward: 0 means it's unlocked.""" import math assert self.storage is not None # omitting timestamp to get the current best block, this will usually hit the cache instead of being slow diff --git a/hathor/transaction/transaction_metadata.py b/hathor/transaction/transaction_metadata.py index cecf1c8f5..fd0039cdf 100644 --- a/hathor/transaction/transaction_metadata.py +++ b/hathor/transaction/transaction_metadata.py @@ -44,7 +44,7 @@ class TransactionMetadata: # XXX: this is only used to defer the reward-lock verification from the transaction spending a reward to the first # block that confirming this transaction, it is important to always have this set to be able to distinguish an old # metadata (that does not have this calculated, from a tx with a new format that does have this calculated) - min_height: int + min_height: Optional[int] # A list of feature activation bit counts. Must only be used by Blocks, is None otherwise. # Each list index corresponds to a bit position, and its respective value is the rolling count of active bits from @@ -68,7 +68,7 @@ def __init__( accumulated_weight: float = 0, score: float = 0, height: Optional[int] = None, - min_height: int = 0, + min_height: Optional[int] = None, feature_activation_bit_counts: Optional[list[int]] = None ) -> None: from hathor.transaction.genesis import is_genesis @@ -276,7 +276,7 @@ def create_from_json(cls, data: dict[str, Any]) -> 'TransactionMetadata': meta.accumulated_weight = data['accumulated_weight'] meta.score = data.get('score', 0) meta.height = data.get('height', 0) # XXX: should we calculate the height if it's not defined? 
- meta.min_height = data.get('min_height', 0) + meta.min_height = data.get('min_height') meta.feature_activation_bit_counts = data.get('feature_activation_bit_counts', []) feature_states_raw = data.get('feature_states') diff --git a/hathor/util.py b/hathor/util.py index 248ee465e..73930d41c 100644 --- a/hathor/util.py +++ b/hathor/util.py @@ -22,44 +22,33 @@ from collections import OrderedDict from contextlib import AbstractContextManager from dataclasses import asdict, dataclass -from enum import Enum from functools import partial, wraps from random import Random as PyRandom -from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, Optional, Sequence, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, Optional, Sequence, TypeVar, cast from structlog import get_logger -from twisted.internet import reactor as twisted_reactor -from twisted.internet.base import ReactorBase -from twisted.internet.posixbase import PosixReactorBase -from twisted.python.threadable import isInIOThread -from zope.interface import Interface -from zope.interface.verify import verifyObject import hathor from hathor.conf import HathorSettings +from hathor.reactor.reactor import reactor as hathor_reactor +from hathor.reactor.reactor_protocol import ReactorProtocol from hathor.types import TokenUid if TYPE_CHECKING: import structlog - from hathor.simulator.clock import HeapClock from hathor.transaction.base_transaction import BaseTransaction -# Reactor = IReactorTime -# XXX: Ideally we would want to be able to express Reactor as IReactorTime+IReactorCore, which is what everyone using -# this type annotation needs, however it is not possible to express this. In practice most classes that implement -# these interfaces use ReactorBase as base, however that is not the case for MemoryReactorClock, which inherits -# IReactorTime from Clock and IReactorCore from MemoryReactor. 
For the lack of a better approach, a union of these -# types is enough for most of our uses. If we end up having to use a different reactor that does not use those -# bases but implement IReactorTime+IReactorCore, we could add it to the Union below -Reactor = Union[ReactorBase, 'HeapClock'] -reactor = cast(PosixReactorBase, twisted_reactor) +# TODO: Those reexports are kept for retro-compatibility, but users could import them directly and then we can remove +# them from this file. +Reactor = ReactorProtocol +reactor = hathor_reactor + logger = get_logger() settings = HathorSettings() T = TypeVar('T') -Z = TypeVar('Z', bound=Interface) def practically_equal(a: dict[Any, Any], b: dict[Any, Any]) -> bool: @@ -112,28 +101,6 @@ def skip_warning(func: Callable[..., Any]) -> Callable[..., Any]: return f -class ReactorThread(Enum): - MAIN_THREAD = 'MAIN_THREAD' - NOT_MAIN_THREAD = 'NOT_MAIN_THREAD' - NOT_RUNNING = 'NOT_RUNNING' - - @classmethod - def get_current_thread(cls, reactor: Reactor) -> 'ReactorThread': - """ Returns if the code is being run on the reactor thread, if it's running already. - """ - running = getattr(reactor, 'running', None) - if running is not None: - if running: - return cls.MAIN_THREAD if isInIOThread() else cls.NOT_MAIN_THREAD - else: - # if reactor is not running yet, there's no threading - return cls.NOT_RUNNING - else: - # on tests, we use Clock instead of a real Reactor, so there's - # no threading. We consider that the reactor is running - return cls.MAIN_THREAD - - def abbrev(data: bytes, max_len: int = 256, gap: bytes = b' [...] ') -> bytes: """ Abbreviates data, mostly for less verbose but still useful logging. 
@@ -419,11 +386,6 @@ def skip_n(it: Iterator[_T], n: int) -> Iterator[_T]: return it -def verified_cast(interface_class: type[Z], obj: Any) -> Z: - verifyObject(interface_class, obj) - return obj - - _DT_ITER_NEXT_WARN = 3 # time in seconds to warn when `next(iter_tx)` takes too long _DT_LOG_PROGRESS = 30 # time in seconds after which a progress will be logged (it can take longer, but not shorter) _DT_YIELD_WARN = 1 # time in seconds to warn when `yield tx` takes too long (which is when processing happens) diff --git a/hathor/utils/iter.py b/hathor/utils/iter.py new file mode 100644 index 000000000..73139b979 --- /dev/null +++ b/hathor/utils/iter.py @@ -0,0 +1,44 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Iterable, TypeVar + +T = TypeVar('T') + + +def batch_iterator(iterator: Iterable[T], batch_size: int) -> Iterable[list[T]]: + """ + Yield batches of up to batch_size items from iterator. 
+ + >>> list(batch_iterator([], 10)) + [] + >>> list(batch_iterator([1, 2, 3, 4], 1)) + [[1], [2], [3], [4]] + >>> list(batch_iterator([1, 2, 3, 4], 2)) + [[1, 2], [3, 4]] + >>> list(batch_iterator([1, 2, 3, 4], 3)) + [[1, 2, 3], [4]] + >>> list(batch_iterator([1, 2, 3, 4], 4)) + [[1, 2, 3, 4]] + """ + assert batch_size >= 1 + batch = [] + for item in iterator: + batch.append(item) + if len(batch) >= batch_size: + yield batch + batch = [] + + if batch: + yield batch diff --git a/hathor/utils/zope.py b/hathor/utils/zope.py new file mode 100644 index 000000000..aacb542fe --- /dev/null +++ b/hathor/utils/zope.py @@ -0,0 +1,45 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Any, Optional, TypeVar, cast + +from zope.interface import Interface +from zope.interface.exceptions import Invalid +from zope.interface.verify import verifyObject + +T = TypeVar('T', bound=Interface) + + +def verified_cast(interface_class: type[T], obj: Any) -> Optional[T]: + """ + Receive a zope interface and an object, and return a cast to this interface if the object implements it. + Return None otherwise. + """ + try: + if verifyObject(interface_class, obj): + return cast(T, obj) + except Invalid: + pass + + return None + + +def asserted_cast(interface_class: type[T], obj: Any) -> T: + """ + Receive a zope interface and an object, and return a cast to this interface if the object implements it. + Raise and AssertionError otherwise. 
+ """ + result = verified_cast(interface_class, obj) + assert result is not None + return result diff --git a/hathor/version.py b/hathor/version.py index 3cbb39fcb..0c3ca7621 100644 --- a/hathor/version.py +++ b/hathor/version.py @@ -19,7 +19,7 @@ from structlog import get_logger -BASE_VERSION = '0.55.0' +BASE_VERSION = '0.56.0' DEFAULT_VERSION_SUFFIX = "local" BUILD_VERSION_FILE_PATH = "./BUILD_VERSION" diff --git a/pyproject.toml b/pyproject.toml index a5f5964ff..78c782e6a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ [tool.poetry] name = "hathor" -version = "0.55.0" +version = "0.56.0" description = "Hathor Network full-node" authors = ["Hathor Team "] license = "Apache-2.0" diff --git a/tests/event/test_event_reorg.py b/tests/event/test_event_reorg.py index d106f47e1..c7c4b5560 100644 --- a/tests/event/test_event_reorg.py +++ b/tests/event/test_event_reorg.py @@ -1,9 +1,10 @@ +from typing import Any from hathor.conf import HathorSettings from hathor.event.model.event_type import EventType from hathor.event.storage import EventMemoryStorage from tests import unittest -from tests.utils import add_new_blocks, get_genesis_key, zip_chunkify +from tests.utils import add_new_blocks, get_genesis_key settings = HathorSettings() @@ -47,76 +48,58 @@ def test_reorg_events(self): # check events actual_events = list(self.event_storage.iter_from_event(0)) - # events are separated into portions that are sorted (indicated by using lists) and portions that are unsorted - # (indicated by using a custom class), the unsorted parts mean that the given events must be present, but not - # necessarily in the given order, to check that we sort both the expected and actual events by tx hash to be - # able to match them, but only for the "unsorted" portions will, for the "sorted" portions the order is - # expected to be the given one - class unsorted(list): - pass - expected_events_grouped = [ - [ - (EventType.LOAD_STARTED, {}), - (EventType.NEW_VERTEX_ACCEPTED, 
{'hash': settings.GENESIS_BLOCK_HASH.hex()}), - (EventType.NEW_VERTEX_ACCEPTED, {'hash': settings.GENESIS_TX1_HASH.hex()}), - (EventType.NEW_VERTEX_ACCEPTED, {'hash': settings.GENESIS_TX2_HASH.hex()}), - (EventType.LOAD_FINISHED, {}) - ], - # XXX: the order of the following events can vary depending on which genesis is spent/confirmed first - unsorted([ + expected_events = [ + (EventType.LOAD_STARTED, {}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': settings.GENESIS_BLOCK_HASH.hex()}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': settings.GENESIS_TX1_HASH.hex()}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': settings.GENESIS_TX2_HASH.hex()}), + (EventType.LOAD_FINISHED, {}), + *sorted_by_hash( + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[0].hash_hex}), (EventType.VERTEX_METADATA_CHANGED, {'hash': settings.GENESIS_TX1_HASH.hex()}), (EventType.VERTEX_METADATA_CHANGED, {'hash': settings.GENESIS_TX2_HASH.hex()}), - (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[0].hash_hex}), - ]), - # XXX: these events must always have this order - [ - (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[0].hash_hex}), - (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[1].hash_hex}), - (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[1].hash_hex}), - (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[2].hash_hex}), - (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[2].hash_hex}), - (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[3].hash_hex}), - (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[3].hash_hex}), - (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[4].hash_hex}), - (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[4].hash_hex}), - (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[5].hash_hex}), - (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[5].hash_hex}), - (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[6].hash_hex}), - (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[6].hash_hex}), - (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[7].hash_hex}), - 
(EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[7].hash_hex}), - (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[8].hash_hex}), - (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[8].hash_hex}), + ), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[0].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[1].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[1].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[2].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[2].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[3].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[3].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[4].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[4].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[5].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[5].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[6].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[6].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[7].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[7].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[8].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[8].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[9].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[9].hash_hex}), + (EventType.REORG_STARTED, {'reorg_size': 2, 'previous_best_block': blocks[9].hash_hex, + 'new_best_block': b0.hash_hex}), + *sorted_by_hash( + (EventType.VERTEX_METADATA_CHANGED, {'hash': b0.hash_hex}), (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[9].hash_hex}), - (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[9].hash_hex}), - (EventType.REORG_STARTED, {'reorg_size': 2, 'previous_best_block': blocks[9].hash_hex, - 'new_best_block': b0.hash_hex}), - ], - # XXX: for some reason the metadata update order of these 
events isn't always the same - unsorted([ (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[8].hash_hex}), - (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[9].hash_hex}), - (EventType.VERTEX_METADATA_CHANGED, {'hash': b0.hash_hex}), - ]), - # XXX: these events must always have this order - [ - (EventType.REORG_FINISHED, {}), - (EventType.NEW_VERTEX_ACCEPTED, {'hash': b0.hash_hex}), - ], + ), + (EventType.REORG_FINISHED, {}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': b0.hash_hex}), ] - for actual_events, expected_events in zip_chunkify(actual_events, expected_events_grouped): - if isinstance(expected_events, unsorted): - actual_events.sort(key=lambda i: i.data.hash) - expected_events.sort(key=lambda i: i[1].get('hash', '')) + for actual_event, expected_event in zip(actual_events, expected_events): + expected_event_type, expected_partial_data = expected_event + + self.assertEqual(EventType(actual_event.type), expected_event_type) - for actual_event, expected_event in zip(actual_events, expected_events): - expected_event_type, expected_partial_data = expected_event + for expected_data_key, expected_data_value in expected_partial_data.items(): + self.assertEqual(actual_event.data.dict()[expected_data_key], expected_data_value) - self.assertEqual(EventType(actual_event.type), expected_event_type) - for expected_data_key, expected_data_value in expected_partial_data.items(): - self.assertEqual(actual_event.data.dict()[expected_data_key], expected_data_value) +def sorted_by_hash(*events: tuple[EventType, dict[str, Any]]) -> list[tuple[EventType, dict[str, Any]]]: + return sorted(events, key=lambda event: event[1]['hash']) class SyncV1EventReorgTest(unittest.SyncV1Params, BaseEventReorgTest): diff --git a/tests/event/test_event_simulation_scenarios.py b/tests/event/test_event_simulation_scenarios.py index fea2548ec..d673042b6 100644 --- a/tests/event/test_event_simulation_scenarios.py +++ b/tests/event/test_event_simulation_scenarios.py @@ -25,7 +25,6 @@ 
MemoryEventSimulationTester, RocksDBEventSimulationTester, ) -from tests.utils import zip_chunkify class BaseEventSimulationScenariosTest(BaseEventSimulationTester): @@ -54,7 +53,7 @@ def test_only_load(self) -> None: EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=4) # noqa: E501 ] - assert responses == expected + assert responses == expected, f'expected: {expected}\n\nactual: {responses}' def test_single_chain_one_block(self): miner = self.simulator.create_miner(self.manager, hashpower=1e6) @@ -71,29 +70,23 @@ def test_single_chain_one_block(self): responses = self._get_success_responses() expected = [ - [ - # LOAD_STATED - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=8), # noqa E501 - # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', token_data=0)], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8), # noqa: E501 - # LOAD_FINISHED - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=8), # noqa E501 - ], - UnorderedList([ - # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360'], twins=[], accumulated_weight=2.0, score=2.0, first_block='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360'], twins=[], accumulated_weight=2.0, score=2.0, first_block='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', nonce=105631935, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8), # noqa E501 - ]), - [ - # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', nonce=105631935, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8) # noqa E501 - ] + # LOAD_STARTED + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=8), # noqa E501 + # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, 
script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', token_data=0)], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8), # noqa: E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8), # noqa: E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8), # noqa: E501 + # LOAD_FINISHED + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), 
latest_event_id=8), # noqa E501 + # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360'], twins=[], accumulated_weight=2.0, score=2.0, first_block='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360'], twins=[], accumulated_weight=2.0, score=2.0, first_block='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', nonce=105631935, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8), # noqa E501 + # One NEW_VERTEX_ACCEPTED for a new block + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', nonce=105631935, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8) # noqa E501 ] - _assert_equal_events(responses, expected) + assert responses == expected, f'expected: {expected}\n\nactual: {responses}' def 
test_single_chain_blocks_and_transactions(self): miner = self.simulator.create_miner(self.manager, hashpower=1e6) @@ -122,73 +115,57 @@ def test_single_chain_blocks_and_transactions(self): responses = self._get_success_responses() expected = [ - [ - # LOAD_STATED - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=36), # noqa E501 - # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', token_data=0)], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa: E501 - 
EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa: E501 - # LOAD_FINISHED - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=36), # noqa E501 - ], - UnorderedList([ - # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360'], twins=[], accumulated_weight=2.0, score=2.0, first_block='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=6, timestamp=1578878910.25, 
type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360'], twins=[], accumulated_weight=2.0, score=2.0, first_block='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', nonce=105631935, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - ]), - [ - # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', nonce=105631935, 
timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=9, timestamp=1578878910.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='967f7d6577e5b301d7facaf7fecd87f18586acfd24fc52e49251c9c4195b49f6', nonce=1279407725, timestamp=1578878911, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', token_data=0)], parents=['53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='967f7d6577e5b301d7facaf7fecd87f18586acfd24fc52e49251c9c4195b49f6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=10, timestamp=1578878910.5, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='967f7d6577e5b301d7facaf7fecd87f18586acfd24fc52e49251c9c4195b49f6', nonce=1279407725, timestamp=1578878911, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', token_data=0)], parents=['53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='967f7d6577e5b301d7facaf7fecd87f18586acfd24fc52e49251c9c4195b49f6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=11, timestamp=1578878910.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='68585cc41a0cc261e97e5cf9d035e4950a9897b9fba18cbec194824522c03c7b', nonce=1529920189, timestamp=1578878912, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', token_data=0)], parents=['967f7d6577e5b301d7facaf7fecd87f18586acfd24fc52e49251c9c4195b49f6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='68585cc41a0cc261e97e5cf9d035e4950a9897b9fba18cbec194824522c03c7b', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=12, timestamp=1578878910.75, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='68585cc41a0cc261e97e5cf9d035e4950a9897b9fba18cbec194824522c03c7b', nonce=1529920189, timestamp=1578878912, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', token_data=0)], parents=['967f7d6577e5b301d7facaf7fecd87f18586acfd24fc52e49251c9c4195b49f6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='68585cc41a0cc261e97e5cf9d035e4950a9897b9fba18cbec194824522c03c7b', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=13, timestamp=1578878911.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0ed0c5540b258485199d1938646bcf10fc3b086da1110af41806c6d0792f413e', nonce=1828786391, timestamp=1578878913, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', token_data=0)], parents=['68585cc41a0cc261e97e5cf9d035e4950a9897b9fba18cbec194824522c03c7b', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0ed0c5540b258485199d1938646bcf10fc3b086da1110af41806c6d0792f413e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=14, timestamp=1578878911.0, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='0ed0c5540b258485199d1938646bcf10fc3b086da1110af41806c6d0792f413e', nonce=1828786391, timestamp=1578878913, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', token_data=0)], parents=['68585cc41a0cc261e97e5cf9d035e4950a9897b9fba18cbec194824522c03c7b', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0ed0c5540b258485199d1938646bcf10fc3b086da1110af41806c6d0792f413e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=15, timestamp=1578878911.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='79c4da1aa12adbb3e384f3e631cf0f8898922db6a7a157c878d6dd5d27b62e77', nonce=1915673046, timestamp=1578878914, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', token_data=0)], parents=['0ed0c5540b258485199d1938646bcf10fc3b086da1110af41806c6d0792f413e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='79c4da1aa12adbb3e384f3e631cf0f8898922db6a7a157c878d6dd5d27b62e77', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=16, timestamp=1578878911.25, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='79c4da1aa12adbb3e384f3e631cf0f8898922db6a7a157c878d6dd5d27b62e77', nonce=1915673046, timestamp=1578878914, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', token_data=0)], parents=['0ed0c5540b258485199d1938646bcf10fc3b086da1110af41806c6d0792f413e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='79c4da1aa12adbb3e384f3e631cf0f8898922db6a7a157c878d6dd5d27b62e77', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=17, timestamp=1578878911.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0c88949d8b9eef81bf0896aabe875c4bd1172bc834b7bc4cc1c756a3d243ec52', nonce=1279525218, timestamp=1578878915, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', token_data=0)], parents=['79c4da1aa12adbb3e384f3e631cf0f8898922db6a7a157c878d6dd5d27b62e77', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0c88949d8b9eef81bf0896aabe875c4bd1172bc834b7bc4cc1c756a3d243ec52', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=18, timestamp=1578878911.5, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='0c88949d8b9eef81bf0896aabe875c4bd1172bc834b7bc4cc1c756a3d243ec52', nonce=1279525218, timestamp=1578878915, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', token_data=0)], parents=['79c4da1aa12adbb3e384f3e631cf0f8898922db6a7a157c878d6dd5d27b62e77', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0c88949d8b9eef81bf0896aabe875c4bd1172bc834b7bc4cc1c756a3d243ec52', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=19, timestamp=1578878911.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3624a05261b604d4ff46b0157892b2ff48a7fb1e31fe65e58fbbfe88cadcbdd4', nonce=4136633663, timestamp=1578878916, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', token_data=0)], parents=['0c88949d8b9eef81bf0896aabe875c4bd1172bc834b7bc4cc1c756a3d243ec52', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='3624a05261b604d4ff46b0157892b2ff48a7fb1e31fe65e58fbbfe88cadcbdd4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=20, timestamp=1578878911.75, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='3624a05261b604d4ff46b0157892b2ff48a7fb1e31fe65e58fbbfe88cadcbdd4', nonce=4136633663, timestamp=1578878916, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', token_data=0)], parents=['0c88949d8b9eef81bf0896aabe875c4bd1172bc834b7bc4cc1c756a3d243ec52', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='3624a05261b604d4ff46b0157892b2ff48a7fb1e31fe65e58fbbfe88cadcbdd4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=21, timestamp=1578878912.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='282d65827ecc485b1c8b916f71605da0f15f353325feb9a1a87fe320c1ebc915', nonce=945260546, timestamp=1578878917, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', token_data=0)], parents=['3624a05261b604d4ff46b0157892b2ff48a7fb1e31fe65e58fbbfe88cadcbdd4', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='282d65827ecc485b1c8b916f71605da0f15f353325feb9a1a87fe320c1ebc915', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=22, timestamp=1578878912.0, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='282d65827ecc485b1c8b916f71605da0f15f353325feb9a1a87fe320c1ebc915', nonce=945260546, timestamp=1578878917, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', token_data=0)], parents=['3624a05261b604d4ff46b0157892b2ff48a7fb1e31fe65e58fbbfe88cadcbdd4', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='282d65827ecc485b1c8b916f71605da0f15f353325feb9a1a87fe320c1ebc915', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=23, timestamp=1578878912.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='4978ebbeb98dccae64f3d32269e1a1030892c82b023448cafd649cad73e9127a', nonce=2222918728, timestamp=1578878918, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', token_data=0)], parents=['282d65827ecc485b1c8b916f71605da0f15f353325feb9a1a87fe320c1ebc915', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='4978ebbeb98dccae64f3d32269e1a1030892c82b023448cafd649cad73e9127a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=24, timestamp=1578878912.25, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='4978ebbeb98dccae64f3d32269e1a1030892c82b023448cafd649cad73e9127a', nonce=2222918728, timestamp=1578878918, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', token_data=0)], parents=['282d65827ecc485b1c8b916f71605da0f15f353325feb9a1a87fe320c1ebc915', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='4978ebbeb98dccae64f3d32269e1a1030892c82b023448cafd649cad73e9127a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=25, timestamp=1578878912.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='47ced60615a9d5d4fc1ca26d9dbc71ca89b8b35541b31d8e75a65d4badfa99c7', nonce=3090209358, timestamp=1578878919, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', token_data=0)], parents=['4978ebbeb98dccae64f3d32269e1a1030892c82b023448cafd649cad73e9127a', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='47ced60615a9d5d4fc1ca26d9dbc71ca89b8b35541b31d8e75a65d4badfa99c7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=26, timestamp=1578878912.5, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='47ced60615a9d5d4fc1ca26d9dbc71ca89b8b35541b31d8e75a65d4badfa99c7', nonce=3090209358, timestamp=1578878919, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', token_data=0)], parents=['4978ebbeb98dccae64f3d32269e1a1030892c82b023448cafd649cad73e9127a', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='47ced60615a9d5d4fc1ca26d9dbc71ca89b8b35541b31d8e75a65d4badfa99c7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=27, timestamp=1578878912.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='15599c25cb30892add963558e06c97c2a838a8a5c047aee416dc9a3617d59baa', nonce=3024981275, timestamp=1578878920, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', token_data=0)], parents=['47ced60615a9d5d4fc1ca26d9dbc71ca89b8b35541b31d8e75a65d4badfa99c7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='15599c25cb30892add963558e06c97c2a838a8a5c047aee416dc9a3617d59baa', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=28, timestamp=1578878912.75, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='15599c25cb30892add963558e06c97c2a838a8a5c047aee416dc9a3617d59baa', nonce=3024981275, timestamp=1578878920, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', token_data=0)], parents=['47ced60615a9d5d4fc1ca26d9dbc71ca89b8b35541b31d8e75a65d4badfa99c7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='15599c25cb30892add963558e06c97c2a838a8a5c047aee416dc9a3617d59baa', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - ], - UnorderedList([ - # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=30, timestamp=1578879395.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', nonce=105631935, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', spent_outputs=[SpentOutput(index=0, tx_ids=['bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c'])], conflict_with=[], voided_by=[], received_by=[], 
children=['967f7d6577e5b301d7facaf7fecd87f18586acfd24fc52e49251c9c4195b49f6'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=29, timestamp=1578879395.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', nonce=0, timestamp=1578879395, version=1, weight=18.648830153779993, inputs=[TxInput(tx_id='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', index=0, data='RjBEAiBQAVgs/jYpndowN3/JMI07fM886ergyyGA2xXaHCu5BQIgf58pdc8S9ABgzkUx8qDQUtV3FcYCMMzHs90q36L0J+whA0zN7LEgxcswPkHbbW7mdnkS5yviaZFgjID1mhpSZSQf')], outputs=[TxOutput(value=2132, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', token_data=0), TxOutput(value=4268, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', token_data=0)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.648830153779993, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - ]), - [ - # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=31, timestamp=1578879395.5, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', nonce=0, timestamp=1578879395, version=1, weight=18.648830153779993, inputs=[TxInput(tx_id='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', index=0, 
data='RjBEAiBQAVgs/jYpndowN3/JMI07fM886ergyyGA2xXaHCu5BQIgf58pdc8S9ABgzkUx8qDQUtV3FcYCMMzHs90q36L0J+whA0zN7LEgxcswPkHbbW7mdnkS5yviaZFgjID1mhpSZSQf')], outputs=[TxOutput(value=2132, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', token_data=0), TxOutput(value=4268, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', token_data=0)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.648830153779993, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - ], - UnorderedList([ - # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=32, timestamp=1578879429.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', nonce=0, timestamp=1578879395, version=1, weight=18.648830153779993, inputs=[TxInput(tx_id='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', index=0, data='RjBEAiBQAVgs/jYpndowN3/JMI07fM886ergyyGA2xXaHCu5BQIgf58pdc8S9ABgzkUx8qDQUtV3FcYCMMzHs90q36L0J+whA0zN7LEgxcswPkHbbW7mdnkS5yviaZFgjID1mhpSZSQf')], outputs=[TxOutput(value=2132, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', token_data=0), TxOutput(value=4268, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', token_data=0)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', spent_outputs=[SpentOutput(index=0, tx_ids=['47b26240891fb57320d72cbd708f5287ca056e73f4b19bb295e6b19d984c07ea']), SpentOutput(index=1, tx_ids=['47b26240891fb57320d72cbd708f5287ca056e73f4b19bb295e6b19d984c07ea'])], conflict_with=[], voided_by=[], received_by=[], children=['47b26240891fb57320d72cbd708f5287ca056e73f4b19bb295e6b19d984c07ea'], twins=[], accumulated_weight=18.648830153779993, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=33, timestamp=1578879429.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='47b26240891fb57320d72cbd708f5287ca056e73f4b19bb295e6b19d984c07ea', nonce=0, timestamp=1578879423, version=1, weight=19.563446104298656, inputs=[TxInput(tx_id='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', index=0, data='RzBFAiANP0tUHsWIPFnkwG0Io7W53viGaXgWfyniIsyJHIva7AIhAIij02z4JPki4RT5b/UkJoKVgazA7QWIq1Zwvik2xhXZIQOnBXShvpioJZgNxYy4lgCg4o84Uw9ncOQuv9mxPWEqeg=='), TxInput(tx_id='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', index=1, data='RjBEAiAj6I8PifTvE1J7eAqxHLu2YqFThzc4C4YzIzq+aX1fBQIgdH+xu08lWjdGiRjuaYgf/S9EZYw8U1HJQ2//WtERx9chA9euCC5MDnVV3I2e5yKj1Q65BsqgN2+IwojZqatIF3T8')], outputs=[TxOutput(value=2764, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', token_data=0), TxOutput(value=3636, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', token_data=0)], parents=['bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='47b26240891fb57320d72cbd708f5287ca056e73f4b19bb295e6b19d984c07ea', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.563446104298656, score=0.0, 
first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - ]), - [ - # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=34, timestamp=1578879429.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='47b26240891fb57320d72cbd708f5287ca056e73f4b19bb295e6b19d984c07ea', nonce=0, timestamp=1578879423, version=1, weight=19.563446104298656, inputs=[TxInput(tx_id='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', index=0, data='RzBFAiANP0tUHsWIPFnkwG0Io7W53viGaXgWfyniIsyJHIva7AIhAIij02z4JPki4RT5b/UkJoKVgazA7QWIq1Zwvik2xhXZIQOnBXShvpioJZgNxYy4lgCg4o84Uw9ncOQuv9mxPWEqeg=='), TxInput(tx_id='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', index=1, data='RjBEAiAj6I8PifTvE1J7eAqxHLu2YqFThzc4C4YzIzq+aX1fBQIgdH+xu08lWjdGiRjuaYgf/S9EZYw8U1HJQ2//WtERx9chA9euCC5MDnVV3I2e5yKj1Q65BsqgN2+IwojZqatIF3T8')], outputs=[TxOutput(value=2764, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', token_data=0), TxOutput(value=3636, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', token_data=0)], parents=['bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='47b26240891fb57320d72cbd708f5287ca056e73f4b19bb295e6b19d984c07ea', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.563446104298656, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - ], - [ - # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=35, timestamp=1578879429.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f2d1327e556ac534e0d934c4e884af3ffe5ecdfd7c5045e30a123a8171500990', nonce=2907562093, 
timestamp=1578878921, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', token_data=0)], parents=['15599c25cb30892add963558e06c97c2a838a8a5c047aee416dc9a3617d59baa', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f2d1327e556ac534e0d934c4e884af3ffe5ecdfd7c5045e30a123a8171500990', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=8.285402218862249, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=36, timestamp=1578879429.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f2d1327e556ac534e0d934c4e884af3ffe5ecdfd7c5045e30a123a8171500990', nonce=2907562093, timestamp=1578878921, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', token_data=0)], parents=['15599c25cb30892add963558e06c97c2a838a8a5c047aee416dc9a3617d59baa', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f2d1327e556ac534e0d934c4e884af3ffe5ecdfd7c5045e30a123a8171500990', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=8.285402218862249, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=36) # noqa E501 - ] + # LOAD_STATED + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=36), # noqa E501 + # One NEW_VERTEX_ACCEPTED for each 
genesis (1 block and 2 txs) + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', token_data=0)], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa: E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa: E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], 
received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa: E501 + # LOAD_FINISHED + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=36), # noqa E501 + # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360'], twins=[], accumulated_weight=2.0, score=2.0, first_block='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360'], twins=[], 
accumulated_weight=2.0, score=2.0, first_block='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', nonce=105631935, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + # One NEW_VERTEX_ACCEPTED for a new block + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', nonce=105631935, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=9, timestamp=1578878910.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='967f7d6577e5b301d7facaf7fecd87f18586acfd24fc52e49251c9c4195b49f6', nonce=1279407725, timestamp=1578878911, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', token_data=0)], parents=['53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='967f7d6577e5b301d7facaf7fecd87f18586acfd24fc52e49251c9c4195b49f6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=10, timestamp=1578878910.5, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='967f7d6577e5b301d7facaf7fecd87f18586acfd24fc52e49251c9c4195b49f6', nonce=1279407725, timestamp=1578878911, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', token_data=0)], parents=['53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='967f7d6577e5b301d7facaf7fecd87f18586acfd24fc52e49251c9c4195b49f6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=11, timestamp=1578878910.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='68585cc41a0cc261e97e5cf9d035e4950a9897b9fba18cbec194824522c03c7b', nonce=1529920189, timestamp=1578878912, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', token_data=0)], parents=['967f7d6577e5b301d7facaf7fecd87f18586acfd24fc52e49251c9c4195b49f6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='68585cc41a0cc261e97e5cf9d035e4950a9897b9fba18cbec194824522c03c7b', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=12, timestamp=1578878910.75, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='68585cc41a0cc261e97e5cf9d035e4950a9897b9fba18cbec194824522c03c7b', nonce=1529920189, timestamp=1578878912, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', token_data=0)], parents=['967f7d6577e5b301d7facaf7fecd87f18586acfd24fc52e49251c9c4195b49f6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='68585cc41a0cc261e97e5cf9d035e4950a9897b9fba18cbec194824522c03c7b', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.584962500721156, first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=13, timestamp=1578878911.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0ed0c5540b258485199d1938646bcf10fc3b086da1110af41806c6d0792f413e', nonce=1828786391, timestamp=1578878913, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', token_data=0)], parents=['68585cc41a0cc261e97e5cf9d035e4950a9897b9fba18cbec194824522c03c7b', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0ed0c5540b258485199d1938646bcf10fc3b086da1110af41806c6d0792f413e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=14, timestamp=1578878911.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0ed0c5540b258485199d1938646bcf10fc3b086da1110af41806c6d0792f413e', nonce=1828786391, timestamp=1578878913, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', token_data=0)], parents=['68585cc41a0cc261e97e5cf9d035e4950a9897b9fba18cbec194824522c03c7b', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='0ed0c5540b258485199d1938646bcf10fc3b086da1110af41806c6d0792f413e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.807354922057604, first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=15, timestamp=1578878911.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='79c4da1aa12adbb3e384f3e631cf0f8898922db6a7a157c878d6dd5d27b62e77', nonce=1915673046, timestamp=1578878914, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', token_data=0)], parents=['0ed0c5540b258485199d1938646bcf10fc3b086da1110af41806c6d0792f413e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='79c4da1aa12adbb3e384f3e631cf0f8898922db6a7a157c878d6dd5d27b62e77', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=16, timestamp=1578878911.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='79c4da1aa12adbb3e384f3e631cf0f8898922db6a7a157c878d6dd5d27b62e77', nonce=1915673046, timestamp=1578878914, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', token_data=0)], parents=['0ed0c5540b258485199d1938646bcf10fc3b086da1110af41806c6d0792f413e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='79c4da1aa12adbb3e384f3e631cf0f8898922db6a7a157c878d6dd5d27b62e77', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.0, first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=17, timestamp=1578878911.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0c88949d8b9eef81bf0896aabe875c4bd1172bc834b7bc4cc1c756a3d243ec52', nonce=1279525218, timestamp=1578878915, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', token_data=0)], parents=['79c4da1aa12adbb3e384f3e631cf0f8898922db6a7a157c878d6dd5d27b62e77', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0c88949d8b9eef81bf0896aabe875c4bd1172bc834b7bc4cc1c756a3d243ec52', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=18, timestamp=1578878911.5, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0c88949d8b9eef81bf0896aabe875c4bd1172bc834b7bc4cc1c756a3d243ec52', nonce=1279525218, timestamp=1578878915, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', token_data=0)], parents=['79c4da1aa12adbb3e384f3e631cf0f8898922db6a7a157c878d6dd5d27b62e77', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='0c88949d8b9eef81bf0896aabe875c4bd1172bc834b7bc4cc1c756a3d243ec52', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.169925001442312, first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=19, timestamp=1578878911.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3624a05261b604d4ff46b0157892b2ff48a7fb1e31fe65e58fbbfe88cadcbdd4', nonce=4136633663, timestamp=1578878916, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', token_data=0)], parents=['0c88949d8b9eef81bf0896aabe875c4bd1172bc834b7bc4cc1c756a3d243ec52', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='3624a05261b604d4ff46b0157892b2ff48a7fb1e31fe65e58fbbfe88cadcbdd4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=20, timestamp=1578878911.75, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3624a05261b604d4ff46b0157892b2ff48a7fb1e31fe65e58fbbfe88cadcbdd4', nonce=4136633663, timestamp=1578878916, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', token_data=0)], parents=['0c88949d8b9eef81bf0896aabe875c4bd1172bc834b7bc4cc1c756a3d243ec52', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='3624a05261b604d4ff46b0157892b2ff48a7fb1e31fe65e58fbbfe88cadcbdd4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.321928094887363, first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=21, timestamp=1578878912.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='282d65827ecc485b1c8b916f71605da0f15f353325feb9a1a87fe320c1ebc915', nonce=945260546, timestamp=1578878917, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', token_data=0)], parents=['3624a05261b604d4ff46b0157892b2ff48a7fb1e31fe65e58fbbfe88cadcbdd4', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='282d65827ecc485b1c8b916f71605da0f15f353325feb9a1a87fe320c1ebc915', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=22, timestamp=1578878912.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='282d65827ecc485b1c8b916f71605da0f15f353325feb9a1a87fe320c1ebc915', nonce=945260546, timestamp=1578878917, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', token_data=0)], parents=['3624a05261b604d4ff46b0157892b2ff48a7fb1e31fe65e58fbbfe88cadcbdd4', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='282d65827ecc485b1c8b916f71605da0f15f353325feb9a1a87fe320c1ebc915', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.459431618637297, first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=23, timestamp=1578878912.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='4978ebbeb98dccae64f3d32269e1a1030892c82b023448cafd649cad73e9127a', nonce=2222918728, timestamp=1578878918, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', token_data=0)], parents=['282d65827ecc485b1c8b916f71605da0f15f353325feb9a1a87fe320c1ebc915', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='4978ebbeb98dccae64f3d32269e1a1030892c82b023448cafd649cad73e9127a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=24, timestamp=1578878912.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='4978ebbeb98dccae64f3d32269e1a1030892c82b023448cafd649cad73e9127a', nonce=2222918728, timestamp=1578878918, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', token_data=0)], parents=['282d65827ecc485b1c8b916f71605da0f15f353325feb9a1a87fe320c1ebc915', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='4978ebbeb98dccae64f3d32269e1a1030892c82b023448cafd649cad73e9127a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.584962500721156, first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=25, timestamp=1578878912.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='47ced60615a9d5d4fc1ca26d9dbc71ca89b8b35541b31d8e75a65d4badfa99c7', nonce=3090209358, timestamp=1578878919, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', token_data=0)], parents=['4978ebbeb98dccae64f3d32269e1a1030892c82b023448cafd649cad73e9127a', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='47ced60615a9d5d4fc1ca26d9dbc71ca89b8b35541b31d8e75a65d4badfa99c7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=26, timestamp=1578878912.5, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='47ced60615a9d5d4fc1ca26d9dbc71ca89b8b35541b31d8e75a65d4badfa99c7', nonce=3090209358, timestamp=1578878919, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', token_data=0)], parents=['4978ebbeb98dccae64f3d32269e1a1030892c82b023448cafd649cad73e9127a', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='47ced60615a9d5d4fc1ca26d9dbc71ca89b8b35541b31d8e75a65d4badfa99c7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.700439718141092, first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=27, timestamp=1578878912.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='15599c25cb30892add963558e06c97c2a838a8a5c047aee416dc9a3617d59baa', nonce=3024981275, timestamp=1578878920, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', token_data=0)], parents=['47ced60615a9d5d4fc1ca26d9dbc71ca89b8b35541b31d8e75a65d4badfa99c7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='15599c25cb30892add963558e06c97c2a838a8a5c047aee416dc9a3617d59baa', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=28, timestamp=1578878912.75, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='15599c25cb30892add963558e06c97c2a838a8a5c047aee416dc9a3617d59baa', nonce=3024981275, timestamp=1578878920, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', token_data=0)], parents=['47ced60615a9d5d4fc1ca26d9dbc71ca89b8b35541b31d8e75a65d4badfa99c7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='15599c25cb30892add963558e06c97c2a838a8a5c047aee416dc9a3617d59baa', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=29, timestamp=1578879395.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', nonce=105631935, timestamp=1578878910, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', spent_outputs=[SpentOutput(index=0, tx_ids=['bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c'])], conflict_with=[], voided_by=[], received_by=[], children=['967f7d6577e5b301d7facaf7fecd87f18586acfd24fc52e49251c9c4195b49f6'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=30, timestamp=1578879395.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', nonce=0, timestamp=1578879395, version=1, weight=18.648830153779993, 
inputs=[TxInput(tx_id='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', index=0, data='RjBEAiBQAVgs/jYpndowN3/JMI07fM886ergyyGA2xXaHCu5BQIgf58pdc8S9ABgzkUx8qDQUtV3FcYCMMzHs90q36L0J+whA0zN7LEgxcswPkHbbW7mdnkS5yviaZFgjID1mhpSZSQf')], outputs=[TxOutput(value=2132, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', token_data=0), TxOutput(value=4268, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', token_data=0)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.648830153779993, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + # One NEW_VERTEX_ACCEPTED for a new tx + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=31, timestamp=1578879395.5, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', nonce=0, timestamp=1578879395, version=1, weight=18.648830153779993, inputs=[TxInput(tx_id='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', index=0, data='RjBEAiBQAVgs/jYpndowN3/JMI07fM886ergyyGA2xXaHCu5BQIgf58pdc8S9ABgzkUx8qDQUtV3FcYCMMzHs90q36L0J+whA0zN7LEgxcswPkHbbW7mdnkS5yviaZFgjID1mhpSZSQf')], outputs=[TxOutput(value=2132, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', token_data=0), TxOutput(value=4268, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', token_data=0)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.648830153779993, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=32, timestamp=1578879429.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='47b26240891fb57320d72cbd708f5287ca056e73f4b19bb295e6b19d984c07ea', nonce=0, timestamp=1578879423, version=1, weight=19.563446104298656, inputs=[TxInput(tx_id='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', index=0, data='RzBFAiANP0tUHsWIPFnkwG0Io7W53viGaXgWfyniIsyJHIva7AIhAIij02z4JPki4RT5b/UkJoKVgazA7QWIq1Zwvik2xhXZIQOnBXShvpioJZgNxYy4lgCg4o84Uw9ncOQuv9mxPWEqeg=='), TxInput(tx_id='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', index=1, data='RjBEAiAj6I8PifTvE1J7eAqxHLu2YqFThzc4C4YzIzq+aX1fBQIgdH+xu08lWjdGiRjuaYgf/S9EZYw8U1HJQ2//WtERx9chA9euCC5MDnVV3I2e5yKj1Q65BsqgN2+IwojZqatIF3T8')], outputs=[TxOutput(value=2764, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', token_data=0), TxOutput(value=3636, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', token_data=0)], parents=['bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='47b26240891fb57320d72cbd708f5287ca056e73f4b19bb295e6b19d984c07ea', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.563446104298656, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 
+ EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=33, timestamp=1578879429.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', nonce=0, timestamp=1578879395, version=1, weight=18.648830153779993, inputs=[TxInput(tx_id='53c5fe57a6d79ab4a8eca660bd0a9935fe9f82d2506d80e0c48339c42c234360', index=0, data='RjBEAiBQAVgs/jYpndowN3/JMI07fM886ergyyGA2xXaHCu5BQIgf58pdc8S9ABgzkUx8qDQUtV3FcYCMMzHs90q36L0J+whA0zN7LEgxcswPkHbbW7mdnkS5yviaZFgjID1mhpSZSQf')], outputs=[TxOutput(value=2132, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', token_data=0), TxOutput(value=4268, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', token_data=0)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', spent_outputs=[SpentOutput(index=0, tx_ids=['47b26240891fb57320d72cbd708f5287ca056e73f4b19bb295e6b19d984c07ea']), SpentOutput(index=1, tx_ids=['47b26240891fb57320d72cbd708f5287ca056e73f4b19bb295e6b19d984c07ea'])], conflict_with=[], voided_by=[], received_by=[], children=['47b26240891fb57320d72cbd708f5287ca056e73f4b19bb295e6b19d984c07ea'], twins=[], accumulated_weight=18.648830153779993, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + # One NEW_VERTEX_ACCEPTED for a new tx + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=34, timestamp=1578879429.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='47b26240891fb57320d72cbd708f5287ca056e73f4b19bb295e6b19d984c07ea', nonce=0, timestamp=1578879423, version=1, weight=19.563446104298656, inputs=[TxInput(tx_id='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', index=0, 
data='RzBFAiANP0tUHsWIPFnkwG0Io7W53viGaXgWfyniIsyJHIva7AIhAIij02z4JPki4RT5b/UkJoKVgazA7QWIq1Zwvik2xhXZIQOnBXShvpioJZgNxYy4lgCg4o84Uw9ncOQuv9mxPWEqeg=='), TxInput(tx_id='bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', index=1, data='RjBEAiAj6I8PifTvE1J7eAqxHLu2YqFThzc4C4YzIzq+aX1fBQIgdH+xu08lWjdGiRjuaYgf/S9EZYw8U1HJQ2//WtERx9chA9euCC5MDnVV3I2e5yKj1Q65BsqgN2+IwojZqatIF3T8')], outputs=[TxOutput(value=2764, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', token_data=0), TxOutput(value=3636, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', token_data=0)], parents=['bd1c5ae097470eecfe10b3f25e02c0f8358b95706484b2174c6ea0ff930cc25c', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='47b26240891fb57320d72cbd708f5287ca056e73f4b19bb295e6b19d984c07ea', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.563446104298656, score=0.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for a new block + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=35, timestamp=1578879429.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f2d1327e556ac534e0d934c4e884af3ffe5ecdfd7c5045e30a123a8171500990', nonce=2907562093, timestamp=1578878921, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', token_data=0)], parents=['15599c25cb30892add963558e06c97c2a838a8a5c047aee416dc9a3617d59baa', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f2d1327e556ac534e0d934c4e884af3ffe5ecdfd7c5045e30a123a8171500990', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=[], twins=[], accumulated_weight=8.0, score=8.285402218862249, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=36), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=36, timestamp=1578879429.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f2d1327e556ac534e0d934c4e884af3ffe5ecdfd7c5045e30a123a8171500990', nonce=2907562093, timestamp=1578878921, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', token_data=0)], parents=['15599c25cb30892add963558e06c97c2a838a8a5c047aee416dc9a3617d59baa', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f2d1327e556ac534e0d934c4e884af3ffe5ecdfd7c5045e30a123a8171500990', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=8.285402218862249, first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=36) # noqa E501 ] - _assert_equal_events(responses, expected) + assert responses == expected, f'expected: {expected}\n\nactual: {responses}' def test_reorg(self): miner1 = self.simulator.create_miner(self.manager, hashpower=1e6) @@ -220,84 +197,43 @@ def test_reorg(self): responses = self._get_success_responses() expected = [ - [ - # LOAD_STATED - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=20), # noqa E501 - # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, 
timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', token_data=0)], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa: E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa: E501 - # LOAD_FINISHED - EventResponse(type='EVENT', 
event=BaseEvent(peer_id=self.peer_id, id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=20), # noqa E501 - ], - UnorderedList([ - # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=5, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3'], twins=[], accumulated_weight=2.0, score=2.0, first_block='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=7, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3'], twins=[], accumulated_weight=2.0, score=2.0, first_block='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 - 
EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=6, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', nonce=2246536493, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 - ]), - [ - # One NEW_VERTEX_ACCEPTED for a new block from manager1 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=8, timestamp=1578878940.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', nonce=2246536493, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), 
latest_event_id=20), # noqa E501 - ], - UnorderedList([ - # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa E501 - # Also one VERTEX_METADATA_CHANGED for the previous block, voiding it - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=9, timestamp=1578878949.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', '4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7'], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=12, timestamp=1578878949.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', '4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7'], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 - EventResponse(type='EVENT', 
event=BaseEvent(peer_id=self.peer_id, id=11, timestamp=1578878949.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', nonce=2246536493, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', spent_outputs=[], conflict_with=[], voided_by=['3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=10, timestamp=1578878949.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', nonce=1279525218, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', spent_outputs=[], conflict_with=[], voided_by=['4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, 
validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 - ]), - [ - # One NEW_VERTEX_ACCEPTED for a new block from manager2 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=13, timestamp=1578878949.75, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', nonce=1279525218, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', spent_outputs=[], conflict_with=[], voided_by=['4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 - # REORG_STARTED caused by a new block from manager2 (below) - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=14, timestamp=1578878950.0, type=EventType.REORG_STARTED, data=ReorgData(reorg_size=1, previous_best_block='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', new_best_block='a729a7abb4248dffa492dd2c2634aa6d92b3e1e05bfa6614118b6ba97bfba5c4', common_block='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792'), group_id=0), latest_event_id=20), # noqa E501 - ], - UnorderedList([ - # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa E501 - # Also one VERTEX_METADATA_CHANGED for the previous block, un-voiding it as it's now part of the 
best blockchain # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=15, timestamp=1578878950.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', '4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', 'a729a7abb4248dffa492dd2c2634aa6d92b3e1e05bfa6614118b6ba97bfba5c4'], twins=[], accumulated_weight=2.0, score=2.0, first_block='4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=18, timestamp=1578878950.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', '4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', 'a729a7abb4248dffa492dd2c2634aa6d92b3e1e05bfa6614118b6ba97bfba5c4'], twins=[], accumulated_weight=2.0, score=2.0, first_block='4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, 
id=17, timestamp=1578878950.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', nonce=1279525218, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['a729a7abb4248dffa492dd2c2634aa6d92b3e1e05bfa6614118b6ba97bfba5c4'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=0), latest_event_id=20), # noqa E501 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=16, timestamp=1578878950.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='a729a7abb4248dffa492dd2c2634aa6d92b3e1e05bfa6614118b6ba97bfba5c4', nonce=4136633663, timestamp=1578878941, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', token_data=0)], parents=['4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='a729a7abb4248dffa492dd2c2634aa6d92b3e1e05bfa6614118b6ba97bfba5c4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=0), latest_event_id=20), # noqa E501 - ]), - [ - # REORG_FINISHED - 
EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=19, timestamp=1578878950.0, type=EventType.REORG_FINISHED, data=EmptyData(), group_id=0), latest_event_id=20), # noqa E501 - # One NEW_VERTEX_ACCEPTED for a new block from manager2 - EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=20, timestamp=1578878950.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='a729a7abb4248dffa492dd2c2634aa6d92b3e1e05bfa6614118b6ba97bfba5c4', nonce=4136633663, timestamp=1578878941, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', token_data=0)], parents=['4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='a729a7abb4248dffa492dd2c2634aa6d92b3e1e05bfa6614118b6ba97bfba5c4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=20) # noqa E501 - ] + # LOAD_STARTED + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=20), # noqa E501 + # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', token_data=0)], parents=[], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa: E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa: E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa: E501 + # LOAD_FINISHED + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=20), # noqa E501 + # One VERTEX_METADATA_CHANGED for a new block (below), and one 
VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=5, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3'], twins=[], accumulated_weight=2.0, score=2.0, first_block='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=6, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3'], twins=[], accumulated_weight=2.0, score=2.0, first_block='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=7, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', nonce=2246536493, timestamp=1578878940, 
version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 + # One NEW_VERTEX_ACCEPTED for a new block from manager1 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=8, timestamp=1578878940.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', nonce=2246536493, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 + # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa E501 + # Also one VERTEX_METADATA_CHANGED for the previous block, voiding it + 
EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=9, timestamp=1578878949.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', '4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7'], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=10, timestamp=1578878949.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', '4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7'], twins=[], accumulated_weight=2.0, score=2.0, first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=11, timestamp=1578878949.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', nonce=2246536493, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, 
script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', spent_outputs=[], conflict_with=[], voided_by=['3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=12, timestamp=1578878949.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', nonce=1279525218, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', spent_outputs=[], conflict_with=[], voided_by=['4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 + # One NEW_VERTEX_ACCEPTED for a new block from manager2 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=13, timestamp=1578878949.75, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', nonce=1279525218, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', spent_outputs=[], conflict_with=[], voided_by=['4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20), # noqa E501 + # REORG_STARTED caused by a new block from manager2 (below) + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=14, timestamp=1578878950.0, type=EventType.REORG_STARTED, data=ReorgData(reorg_size=1, previous_best_block='3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', new_best_block='a729a7abb4248dffa492dd2c2634aa6d92b3e1e05bfa6614118b6ba97bfba5c4', common_block='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792'), group_id=0), latest_event_id=20), # noqa E501 + # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa E501 + # Also one VERTEX_METADATA_CHANGED for the previous block, un-voiding it as it's now part of the best blockchain # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=15, timestamp=1578878950.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, version=1, weight=2.0, 
inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', '4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', 'a729a7abb4248dffa492dd2c2634aa6d92b3e1e05bfa6614118b6ba97bfba5c4'], twins=[], accumulated_weight=2.0, score=2.0, first_block='4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=16, timestamp=1578878950.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3cfde60f140d2838581d885e656af1049fa8eab964defc5bca3d883b83c9afc3', '4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', 'a729a7abb4248dffa492dd2c2634aa6d92b3e1e05bfa6614118b6ba97bfba5c4'], twins=[], accumulated_weight=2.0, score=2.0, first_block='4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=17, timestamp=1578878950.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', nonce=1279525218, timestamp=1578878940, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', 
token_data=0)], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['a729a7abb4248dffa492dd2c2634aa6d92b3e1e05bfa6614118b6ba97bfba5c4'], twins=[], accumulated_weight=2.0, score=4.0, first_block=None, height=1, validation='full'), aux_pow=None), group_id=0), latest_event_id=20), # noqa E501 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=18, timestamp=1578878950.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='a729a7abb4248dffa492dd2c2634aa6d92b3e1e05bfa6614118b6ba97bfba5c4', nonce=4136633663, timestamp=1578878941, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', token_data=0)], parents=['4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='a729a7abb4248dffa492dd2c2634aa6d92b3e1e05bfa6614118b6ba97bfba5c4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=0), latest_event_id=20), # noqa E501 + # REORG_FINISHED + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, id=19, timestamp=1578878950.0, type=EventType.REORG_FINISHED, data=EmptyData(), group_id=0), latest_event_id=20), # noqa E501 + # One NEW_VERTEX_ACCEPTED for a new block from manager2 + EventResponse(type='EVENT', event=BaseEvent(peer_id=self.peer_id, 
id=20, timestamp=1578878950.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='a729a7abb4248dffa492dd2c2634aa6d92b3e1e05bfa6614118b6ba97bfba5c4', nonce=4136633663, timestamp=1578878941, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', token_data=0)], parents=['4f0b3d13966f95f461d4edc6389c8440955e13a75f87c6bc2a4b455d813fb2f7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='a729a7abb4248dffa492dd2c2634aa6d92b3e1e05bfa6614118b6ba97bfba5c4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=20) # noqa E501 ] - _assert_equal_events(responses, expected) - - -class UnorderedList(list): - pass - - -def _assert_equal_events(actual_events: list[EventResponse], expected_events: list[list[EventResponse]]) -> None: - for actual_events_chunk, expected_events_chunk in zip_chunkify(actual_events, expected_events): - if isinstance(expected_events_chunk, UnorderedList): - actual_events_chunk = _sorted_by_hash_without_id(actual_events_chunk) - expected_events_chunk = _sorted_by_hash_without_id(expected_events_chunk) - - assert expected_events_chunk == actual_events_chunk, ( - f'different chunks:\n\nexpected: {expected_events_chunk}\n\nactual: {actual_events_chunk}\n' - ) - - -def _sorted_by_hash_without_id(responses: list[EventResponse]) -> list[EventResponse]: - responses_without_id = [response.copy(exclude={'event': {'id'}}) for response in responses] - - def key(response: EventResponse) -> str: - assert isinstance(response.event.data, TxData), ( - f'only tx events can be sorted. 
event.data type: {type(response.event.data)}' - ) - return response.event.data.hash - - return sorted(responses_without_id, key=key) + assert responses == expected, f'expected: {expected}\n\nactual: {responses}' class MemoryEventSimulationScenariosTest(BaseEventSimulationScenariosTest, MemoryEventSimulationTester): diff --git a/tests/event/websocket/test_factory.py b/tests/event/websocket/test_factory.py index d81509f27..c6e1b6cda 100644 --- a/tests/event/websocket/test_factory.py +++ b/tests/event/websocket/test_factory.py @@ -20,7 +20,7 @@ from hathor.event.websocket.factory import EventWebsocketFactory from hathor.event.websocket.protocol import EventWebsocketProtocol from hathor.event.websocket.response import EventResponse, InvalidRequestType -from hathor.simulator.clock import HeapClock +from hathor.simulator.clock import MemoryReactorHeapClock from tests.utils import EventMocker @@ -109,7 +109,7 @@ def test_broadcast_multiple_events_multiple_connections(): ) def test_send_next_event_to_connection(next_expected_event_id: int, can_receive_event: bool) -> None: n_starting_events = 10 - clock = HeapClock() + clock = MemoryReactorHeapClock() factory = _get_factory(n_starting_events, clock) connection = Mock(spec_set=EventWebsocketProtocol) connection.send_event_response = Mock() @@ -137,7 +137,10 @@ def test_send_next_event_to_connection(next_expected_event_id: int, can_receive_ connection.send_event_response.assert_has_calls(calls) -def _get_factory(n_starting_events: int = 0, clock: HeapClock = HeapClock()) -> EventWebsocketFactory: +def _get_factory( + n_starting_events: int = 0, + clock: MemoryReactorHeapClock = MemoryReactorHeapClock() +) -> EventWebsocketFactory: event_storage = EventMemoryStorage() for event_id in range(n_starting_events): diff --git a/tests/feature_activation/test_feature_simulation.py b/tests/feature_activation/test_feature_simulation.py index bb4faeae5..5b5f0b475 100644 --- a/tests/feature_activation/test_feature_simulation.py +++ 
b/tests/feature_activation/test_feature_simulation.py @@ -75,8 +75,10 @@ def test_feature(self) -> None: } ) - feature_service = artifacts.feature_service - feature_service._feature_settings = feature_settings + feature_service = FeatureService( + feature_settings=feature_settings, + tx_storage=artifacts.tx_storage + ) feature_resource = FeatureResource( feature_settings=feature_settings, feature_service=feature_service, @@ -338,8 +340,10 @@ def test_reorg(self) -> None: ) } ) - feature_service = artifacts.feature_service - feature_service._feature_settings = feature_settings + feature_service = FeatureService( + feature_settings=feature_settings, + tx_storage=artifacts.tx_storage + ) feature_resource = FeatureResource( feature_settings=feature_settings, feature_service=feature_service, @@ -562,8 +566,10 @@ def test_feature_from_existing_storage(self) -> None: } ) - feature_service = artifacts1.feature_service - feature_service._feature_settings = feature_settings + feature_service = FeatureService( + feature_settings=feature_settings, + tx_storage=artifacts1.tx_storage + ) feature_resource = FeatureResource( feature_settings=feature_settings, feature_service=feature_service, @@ -620,8 +626,10 @@ def test_feature_from_existing_storage(self) -> None: artifacts2 = self.simulator.create_artifacts(builder) # new feature_service is created with the same storage generated above - feature_service = artifacts2.feature_service - feature_service._feature_settings = feature_settings + feature_service = FeatureService( + feature_settings=feature_settings, + tx_storage=artifacts2.tx_storage + ) feature_resource = FeatureResource( feature_settings=feature_settings, feature_service=feature_service, diff --git a/tests/feature_activation/test_mining_simulation.py b/tests/feature_activation/test_mining_simulation.py new file mode 100644 index 000000000..d8e785a49 --- /dev/null +++ b/tests/feature_activation/test_mining_simulation.py @@ -0,0 +1,198 @@ +# Copyright 2023 Hathor Labs +# 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 +from json import JSONDecodeError +from typing import Any, Iterable +from unittest.mock import Mock + +from twisted.internet.testing import StringTransport + +from hathor.conf import HathorSettings as get_settings +from hathor.conf.settings import HathorSettings +from hathor.feature_activation.feature import Feature +from hathor.feature_activation.model.criteria import Criteria +from hathor.feature_activation.settings import Settings as FeatureSettings +from hathor.mining.ws import MiningWebsocketFactory, MiningWebsocketProtocol +from hathor.p2p.resources import MiningResource +from hathor.transaction.resources import GetBlockTemplateResource +from hathor.transaction.util import unpack, unpack_len +from hathor.util import json_loadb +from tests import unittest +from tests.resources.base_resource import StubSite +from tests.simulation.base import SimulatorTestCase + + +class BaseMiningSimulationTest(SimulatorTestCase): + def test_signal_bits_in_mining(self) -> None: + settings_dict = get_settings()._asdict() + settings_dict.update( + FEATURE_ACTIVATION=FeatureSettings( + evaluation_interval=4, + default_threshold=3, + features={ + Feature.NOP_FEATURE_1: Criteria( + bit=0, + start_height=8, + timeout_height=20, + version='0.0.0', + signal_support_by_default=True + ), + Feature.NOP_FEATURE_2: Criteria( + bit=2, + start_height=12, + timeout_height=24, + version='0.0.0' + ), + } + ) + ) + settings = 
HathorSettings(**settings_dict) + + builder = self.simulator.get_default_builder() \ + .set_settings(settings) \ + .set_features(support_features={Feature.NOP_FEATURE_2}, not_support_features=set()) + + manager = self.simulator.create_peer(builder) + manager.allow_mining_without_peers() + miner = self.simulator.create_miner(manager, hashpower=1e6) + miner.start() + + # There are 3 resources available for miners, and all of them should contain the correct signal_bits + get_block_template_resource = GetBlockTemplateResource(manager) + get_block_template_client = StubSite(get_block_template_resource) + + mining_resource = MiningResource(manager) + mining_client = StubSite(mining_resource) + + ws_factory = MiningWebsocketFactory(manager) + ws_factory.openHandshakeTimeout = 0 + ws_protocol = ws_factory.buildProtocol(addr=Mock()) + ws_transport = StringTransport() + ws_protocol.makeConnection(ws_transport) + ws_protocol.state = MiningWebsocketProtocol.STATE_OPEN + ws_protocol.onOpen() + + # At the beginning, all features are outside their signaling period, so none are signaled. + expected_signal_bits = 0b0000 + assert self._get_ws_signal_bits(ws_transport) == [expected_signal_bits] + miner.pause_after_exactly(n_blocks=1) + self.simulator.run(3600) + assert self._get_signal_bits_from_get_block_template(get_block_template_client) == expected_signal_bits + assert self._get_signal_bits_from_mining(mining_client) == expected_signal_bits + assert self._get_ws_signal_bits(ws_transport) == [expected_signal_bits] + + miner.pause_after_exactly(n_blocks=6) + self.simulator.run(3600) + assert self._get_ws_signal_bits(ws_transport) == [expected_signal_bits] * 6 + + # At height=8, NOP_FEATURE_1 is signaling, so it's enabled by the default support. 
+ expected_signal_bits = 0b0001 + miner.pause_after_exactly(n_blocks=1) + self.simulator.run(3600) + assert self._get_signal_bits_from_get_block_template(get_block_template_client) == expected_signal_bits + assert self._get_signal_bits_from_mining(mining_client) == expected_signal_bits + assert self._get_ws_signal_bits(ws_transport) == [expected_signal_bits] + + miner.pause_after_exactly(n_blocks=3) + self.simulator.run(3600) + assert self._get_ws_signal_bits(ws_transport) == [expected_signal_bits] * 3 + + # At height=12, NOP_FEATURE_2 is signaling, enabled by the user. NOP_FEATURE_1 also continues signaling. + expected_signal_bits = 0b0101 + miner.pause_after_exactly(n_blocks=1) + self.simulator.run(3600) + assert self._get_signal_bits_from_get_block_template(get_block_template_client) == expected_signal_bits + assert self._get_signal_bits_from_mining(mining_client) == expected_signal_bits + assert self._get_ws_signal_bits(ws_transport) == [expected_signal_bits] + + miner.pause_after_exactly(n_blocks=7) + self.simulator.run(3600) + assert self._get_ws_signal_bits(ws_transport) == [expected_signal_bits] * 7 + + # At height=20, NOP_FEATURE_1 stops signaling, and NOP_FEATURE_2 continues. + expected_signal_bits = 0b0100 + miner.pause_after_exactly(n_blocks=1) + self.simulator.run(3600) + assert self._get_signal_bits_from_get_block_template(get_block_template_client) == expected_signal_bits + assert self._get_signal_bits_from_mining(mining_client) == expected_signal_bits + assert self._get_ws_signal_bits(ws_transport) == [expected_signal_bits] + + miner.pause_after_exactly(n_blocks=3) + self.simulator.run(3600) + assert self._get_ws_signal_bits(ws_transport) == [expected_signal_bits] * 3 + + # At height=24, all features have left their signaling period and therefore none are signaled. 
+ expected_signal_bits = 0b0000 + miner.pause_after_exactly(n_blocks=1) + self.simulator.run(3600) + assert self._get_signal_bits_from_get_block_template(get_block_template_client) == expected_signal_bits + assert self._get_signal_bits_from_mining(mining_client) == expected_signal_bits + assert self._get_ws_signal_bits(ws_transport) == [expected_signal_bits] + + def _get_signal_bits_from_get_block_template(self, web_client: StubSite) -> int: + result = self._get_result(web_client) + return result['signal_bits'] + + def _get_signal_bits_from_mining(self, web_client: StubSite) -> int: + result = self._get_result(web_client) + block_bytes = base64.b64decode(result['block_bytes']) + return block_bytes[0] + + @staticmethod + def _get_result(web_client: StubSite) -> dict[str, Any]: + response = web_client.get('') + return response.result.json_value() + + def _get_ws_signal_bits(self, transport: StringTransport) -> list[int]: + messages = self._get_transport_messages(transport) + signal_bits = [message['params'][0]['signal_bits'] for message in messages] + + return signal_bits + + def _get_transport_messages(self, transport: StringTransport) -> list[dict[str, Any]]: + values = transport.value() + result = self._decode_values(values) + + transport.clear() + + return list(result) + + @staticmethod + def _decode_values(values: bytes) -> Iterable[dict[str, Any]]: + buf = values + + while buf: + try: + (_, _, value_length), new_buf = unpack('!BBH', buf) + value, new_buf = unpack_len(value_length, new_buf) + yield json_loadb(value) + except JSONDecodeError: + (_, value_length), new_buf = unpack('!BB', buf) + value, new_buf = unpack_len(value_length, new_buf) + yield json_loadb(value) + + buf = new_buf + + +class SyncV1MiningSimulationTest(unittest.SyncV1Params, BaseMiningSimulationTest): + __test__ = True + + +class SyncV2MiningSimulationTest(unittest.SyncV2Params, BaseMiningSimulationTest): + __test__ = True + + +class SyncBridgeMiningSimulationTest(unittest.SyncBridgeParams, 
BaseMiningSimulationTest): + __test__ = True diff --git a/tests/others/test_metrics.py b/tests/others/test_metrics.py index 97b7955c9..d149478cb 100644 --- a/tests/others/test_metrics.py +++ b/tests/others/test_metrics.py @@ -74,7 +74,7 @@ def test_connections_manager_integration(self): self.assertEquals(manager.metrics.known_peers, 3) self.assertEquals(manager.metrics.connected_peers, 2) self.assertEquals(manager.metrics.handshaking_peers, 1) - self.assertEquals(manager.metrics.connecting_peers, 0) + self.assertEquals(manager.metrics.connecting_peers, 1) manager.metrics.stop() diff --git a/tests/p2p/test_capabilities.py b/tests/p2p/test_capabilities.py index 874267910..a9bae9c02 100644 --- a/tests/p2p/test_capabilities.py +++ b/tests/p2p/test_capabilities.py @@ -1,6 +1,6 @@ from hathor.conf import HathorSettings from hathor.p2p.sync_v1.agent import NodeSyncTimestamp -from hathor.p2p.sync_v2.manager import NodeBlockSync +from hathor.p2p.sync_v2.agent import NodeBlockSync from hathor.simulator import FakeConnection from tests import unittest diff --git a/tests/p2p/test_connections.py b/tests/p2p/test_connections.py index 34730afb0..03f56358f 100644 --- a/tests/p2p/test_connections.py +++ b/tests/p2p/test_connections.py @@ -23,6 +23,6 @@ def test_manager_connections(self): endpoint = 'tcp://127.0.0.1:8005' manager.connections.connect_to(endpoint, use_ssl=True) - self.assertNotIn(endpoint, manager.connections.iter_not_ready_endpoints()) + self.assertIn(endpoint, manager.connections.iter_not_ready_endpoints()) self.assertNotIn(endpoint, manager.connections.iter_ready_connections()) self.assertNotIn(endpoint, manager.connections.iter_all_connections()) diff --git a/tests/p2p/test_get_best_blockchain.py b/tests/p2p/test_get_best_blockchain.py index 367fdc70e..a37e4a742 100644 --- a/tests/p2p/test_get_best_blockchain.py +++ b/tests/p2p/test_get_best_blockchain.py @@ -1,12 +1,17 @@ +from twisted.internet.defer import inlineCallbacks + from hathor.conf import HathorSettings 
from hathor.indexes.height_index import HeightInfo from hathor.manager import DEFAULT_CAPABILITIES from hathor.p2p.messages import ProtocolMessages +from hathor.p2p.resources import StatusResource from hathor.p2p.states import ReadyState +from hathor.p2p.utils import to_height_info from hathor.simulator import FakeConnection from hathor.simulator.trigger import StopAfterNMinedBlocks from hathor.util import json_dumps from tests import unittest +from tests.resources.base_resource import StubSite from tests.simulation.base import SimulatorTestCase settings = HathorSettings() @@ -339,6 +344,106 @@ def test_stop_looping_on_exit(self): self.assertIsNotNone(state2.lc_get_best_blockchain) self.assertFalse(state2.lc_get_best_blockchain.running) + @inlineCallbacks + def test_best_blockchain_from_status_resource(self): + manager1 = self.create_peer() + manager2 = self.create_peer() + conn12 = FakeConnection(manager1, manager2, latency=0.05) + self.simulator.add_connection(conn12) + self.simulator.run(60) + + # check /status before generate blocks + self.web = StubSite(StatusResource(manager1)) + response = yield self.web.get("status") + data = response.json_value() + connections = data.get('connections') + self.assertEqual(len(connections['connected_peers']), 1) + dag = data.get('dag') + + # connected_peers + # assert there is the genesis block + peer_best_blockchain = connections['connected_peers'][0]['peer_best_blockchain'] + self.assertEqual(len(peer_best_blockchain), 1) + # assert the height_info height is from genesis + raw_height_info_height = peer_best_blockchain[0][0] + self.assertEqual(raw_height_info_height, 0) + + # dag + # assert there is the genesis block + peer_best_blockchain = dag['best_blockchain'] + self.assertEqual(len(peer_best_blockchain), 1) + # assert the height_info height is from genesis + raw_height_info_height = peer_best_blockchain[0][0] + self.assertEqual(raw_height_info_height, 0) + + # mine 20 blocks + miner = 
self.simulator.create_miner(manager1, hashpower=1e6) + miner.start() + trigger = StopAfterNMinedBlocks(miner, quantity=20) + self.assertTrue(self.simulator.run(1440, trigger=trigger)) + miner.stop() + # let the blocks propagate + self.simulator.run(60) + + # check /status after mining blocks + response = yield self.web.get("status") + data = response.json_value() + connections = data.get('connections') + self.assertEqual(len(connections['connected_peers']), 1) + dag = data.get('dag') + + # connected_peers + # assert default peer_best_blockchain length + peer_best_blockchain = connections['connected_peers'][0]['peer_best_blockchain'] + self.assertEqual(len(peer_best_blockchain), settings.DEFAULT_BEST_BLOCKCHAIN_BLOCKS) + + # assert a raw_height_info can be converted to HeightInfo + try: + raw_height_info = peer_best_blockchain[0] + height_info = to_height_info(raw_height_info) + # assert the first element height is from the latest block mined + self.assertEqual(height_info.height, 20) + except ValueError: + self.fail('Block info not valid') + + # assert decreasing order for a sequence of heights + height_sequence = [hi[0] for hi in peer_best_blockchain] + try: + self.assertTrue(check_decreasing_monotonicity(height_sequence)) + except ValueError as e: + self.fail(str(e)) + + # dag + # assert default peer_best_blockchain length + peer_best_blockchain = dag['best_blockchain'] + self.assertEqual(len(peer_best_blockchain), settings.DEFAULT_BEST_BLOCKCHAIN_BLOCKS) + + # assert a raw_height_info can be converted to HeightInfo + try: + raw_height_info = peer_best_blockchain[0] + height_info = to_height_info(raw_height_info) + # assert the first element height is from the latest block mined + self.assertEqual(height_info.height, 20) + except ValueError: + self.fail('Block info not valid') + + # assert decreasing order for a sequence of heights + height_sequence = [hi[0] for hi in peer_best_blockchain] + try: + 
self.assertTrue(check_decreasing_monotonicity(height_sequence)) + except ValueError as e: + self.fail(str(e)) + + +def check_decreasing_monotonicity(sequence: list[int]) -> bool: + """Check if a sequence is monotonic and is decreasing. Raise an exception otherwise. + """ + n = len(sequence) + for i in range(1, n): + if sequence[i] >= sequence[i-1]: + raise ValueError(f'Sequence not monotonic. Value {sequence[i]} >= {sequence[i-1]}. Index: {i}.') + return True + class SyncV1GetBestBlockchainTestCase(unittest.SyncV1Params, BaseGetBestBlockchainTestCase): __test__ = True diff --git a/tests/pubsub/test_pubsub2.py b/tests/pubsub/test_pubsub2.py index 0ecd01540..faaf9c758 100644 --- a/tests/pubsub/test_pubsub2.py +++ b/tests/pubsub/test_pubsub2.py @@ -12,38 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Callable, cast +from typing import Callable from unittest.mock import Mock, patch import pytest -from twisted.internet.posixbase import PosixReactorBase -from twisted.internet.task import Clock from twisted.internet.testing import MemoryReactorClock from hathor.pubsub import HathorEvents, PubSubManager -def test_clock() -> None: - """ - Running the PubSub with a Clock makes it call the handler function with callLater, so a plain function call - gets executed before the handler. 
- """ - reactor = Clock() - pubsub = PubSubManager(cast(PosixReactorBase, reactor)) - handler = Mock() - - pubsub.subscribe(HathorEvents.MANAGER_ON_START, handler) - pubsub.publish(HathorEvents.MANAGER_ON_START) - - handler(HathorEvents.MANAGER_ON_STOP) - - reactor.advance(0) - - assert len(handler.call_args_list) == 2 - assert handler.call_args_list[0].args[0] == HathorEvents.MANAGER_ON_STOP - assert handler.call_args_list[1].args[0] == HathorEvents.MANAGER_ON_START - - @pytest.mark.parametrize('is_in_main_thread', [False, True]) def test_memory_reactor_clock_not_running(is_in_main_thread: bool) -> None: """ @@ -52,12 +29,12 @@ def test_memory_reactor_clock_not_running(is_in_main_thread: bool) -> None: """ reactor = MemoryReactorClock() reactor.running = False - pubsub = PubSubManager(cast(PosixReactorBase, reactor)) + pubsub = PubSubManager(reactor) handler = Mock() pubsub.subscribe(HathorEvents.MANAGER_ON_START, handler) - with patch('hathor.util.isInIOThread', lambda: is_in_main_thread): + with patch('hathor.pubsub.isInIOThread', lambda: is_in_main_thread): pubsub.publish(HathorEvents.MANAGER_ON_START) handler(HathorEvents.MANAGER_ON_STOP) @@ -74,12 +51,12 @@ def test_memory_reactor_clock_running_no_threading() -> None: """ reactor = MemoryReactorClock() reactor.running = True - pubsub = PubSubManager(cast(PosixReactorBase, reactor)) + pubsub = PubSubManager(reactor) handler = Mock() pubsub.subscribe(HathorEvents.MANAGER_ON_START, handler) - with patch('hathor.util.isInIOThread', lambda: True): + with patch('hathor.pubsub.isInIOThread', lambda: True): pubsub.publish(HathorEvents.MANAGER_ON_START) handler(HathorEvents.MANAGER_ON_STOP) @@ -98,7 +75,7 @@ def test_memory_reactor_clock_running_with_threading() -> None: """ reactor = MemoryReactorClock() reactor.running = True - pubsub = PubSubManager(cast(PosixReactorBase, reactor)) + pubsub = PubSubManager(reactor) handler = Mock() def fake_call_from_thread(f: Callable) -> None: @@ -109,7 +86,10 @@ def 
fake_call_from_thread(f: Callable) -> None: pubsub.subscribe(HathorEvents.MANAGER_ON_START, handler) - with patch('hathor.util.isInIOThread', lambda: False): + with ( + patch('hathor.pubsub.isInIOThread', lambda: False), + patch('hathor.utils.zope.verifyObject', lambda _a, _b: True) + ): pubsub.publish(HathorEvents.MANAGER_ON_START) handler(HathorEvents.MANAGER_ON_STOP) diff --git a/tests/resources/transaction/test_mining.py b/tests/resources/transaction/test_mining.py index 885abfebf..0981794bd 100644 --- a/tests/resources/transaction/test_mining.py +++ b/tests/resources/transaction/test_mining.py @@ -43,6 +43,7 @@ def test_get_block_template_with_address(self): }, 'tokens': [], 'data': '', + 'signal_bits': 0 }) @inlineCallbacks @@ -75,6 +76,7 @@ def test_get_block_template_without_address(self): }, 'tokens': [], 'data': '', + 'signal_bits': 0 }) @inlineCallbacks diff --git a/tests/simulation/test_simulator.py b/tests/simulation/test_simulator.py index d39cf81e0..cce6c795b 100644 --- a/tests/simulation/test_simulator.py +++ b/tests/simulation/test_simulator.py @@ -44,7 +44,7 @@ def test_two_nodes(self): self.simulator.add_connection(conn12) self.simulator.run(60) - miner2 = self.simulator.create_miner(manager2, hashpower=100e6) + miner2 = self.simulator.create_miner(manager2, hashpower=10e9) miner2.start() self.simulator.run(120) @@ -57,7 +57,7 @@ def test_two_nodes(self): gen_tx1.stop() gen_tx2.stop() - self.assertTrue(self.simulator.run(600, trigger=StopWhenSynced(conn12))) + self.assertTrue(self.simulator.run(3000, trigger=StopWhenSynced(conn12))) self.assertTrue(conn12.is_connected) self.assertTipsEqual(manager1, manager2) diff --git a/tests/sysctl/test_sysctl.py b/tests/sysctl/test_sysctl.py index 55137105c..9ae554b9e 100644 --- a/tests/sysctl/test_sysctl.py +++ b/tests/sysctl/test_sysctl.py @@ -6,6 +6,7 @@ from hathor.sysctl import Sysctl from hathor.sysctl.exception import SysctlEntryNotFound, SysctlReadOnlyEntry, SysctlWriteOnlyEntry from 
hathor.sysctl.factory import SysctlFactory +from hathor.sysctl.runner import SysctlRunner from hathor.sysctl.sysctl import SysctlCommand from tests import unittest @@ -61,7 +62,8 @@ def setUp(self) -> None: self.root.put_child('core', core) self.root.put_child('ab.bc.cd', multi) - factory = SysctlFactory(self.root) + runner = SysctlRunner(self.root) + factory = SysctlFactory(runner) self.proto = factory.buildProtocol(('127.0.0.1', 0)) self.tr = proto_helpers.StringTransport() self.proto.makeConnection(self.tr) @@ -237,13 +239,14 @@ def test_proto_backup(self) -> None: self.proto.lineReceived(b'!backup') output = self.tr.value() lines = set(output.split(b'\n')) - self.assertEqual(lines, { - b'core.loglevel="info"', + expected = { b'net.max_connections=3', b'net.rate_limit=4, 1', b'net.readonly=0.25', + b'core.loglevel="info"', b'', # output ends with a new line (\n) - }) + } + self.assertEqual(lines, expected) def test_proto_help(self) -> None: self.proto.lineReceived(b'!help') diff --git a/tests/test_memory_reactor_clock.py b/tests/test_memory_reactor_clock.py new file mode 100644 index 000000000..1ba9eda12 --- /dev/null +++ b/tests/test_memory_reactor_clock.py @@ -0,0 +1,24 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from twisted.internet.testing import MemoryReactorClock + + +class TestMemoryReactorClock(MemoryReactorClock): + def run(self): + """ + We have to override MemoryReactor.run() because the original Twisted implementation weirdly calls stop() inside + run(), and we need the reactor running during our tests. + """ + self.running = True diff --git a/tests/tx/test_mining.py b/tests/tx/test_mining.py index 468923c31..064739ad3 100644 --- a/tests/tx/test_mining.py +++ b/tests/tx/test_mining.py @@ -30,7 +30,7 @@ def setUp(self): self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] - def test_block_template_after_genesis(self): + def test_block_template_after_genesis(self) -> None: manager = self.create_peer('testnet', tx_storage=self.tx_storage) block_templates = manager.get_block_templates() @@ -47,9 +47,10 @@ def test_block_template_after_genesis(self): parents_any=[], height=1, # genesis is 0 score=sum_weights(self.genesis_blocks[0].weight, 1.0), + signal_bits=0 )) - def test_regular_block_template(self): + def test_regular_block_template(self) -> None: manager = self.create_peer('testnet', tx_storage=self.tx_storage) # add 100 blocks @@ -69,6 +70,7 @@ def test_regular_block_template(self): parents_any=[], height=101, # genesis is 0 score=sum_weights(blocks[-1].get_metadata().score, 1.0), + signal_bits=0 )) self.assertConsensusValid(manager) diff --git a/tests/tx/test_tx.py b/tests/tx/test_tx.py index 65b53d843..7c3f66e49 100644 --- a/tests/tx/test_tx.py +++ b/tests/tx/test_tx.py @@ -1137,6 +1137,60 @@ def test_sigops_input_multi_below_limit(self) -> None: tx.update_hash() tx.verify_sigops_input() + def test_compare_bytes_equal(self) -> None: + # create some block + [block1] = add_new_blocks(self.manager, 1, advance_clock=1) + + # clone it to make sure we have a new instance + block2 = block1.clone() + + # the storage already has block1 and should correctly return True + 
self.assertTrue(self.tx_storage.compare_bytes_with_local_tx(block2)) + + def test_compare_bytes_different(self) -> None: + # create some block + [block1] = add_new_blocks(self.manager, 1, advance_clock=1) + + # clone it and change something, doesn't matter what it is + # XXX: note the hash is not being updated on purpose, we expect a failure even if the hash hasn't changed + block2 = block1.clone() + block2.weight += 1 + + # the storage already has block1 and should correctly return False + self.assertFalse(self.tx_storage.compare_bytes_with_local_tx(block2)) + + def test_compare_bytes_partially_validated_equal(self) -> None: + from hathor.transaction.validation_state import ValidationState + + # create some block, make it partially valid and save it + [block1] = add_new_blocks(self.manager, 1, advance_clock=1) + block1.set_validation(ValidationState.BASIC) + with self.tx_storage.allow_partially_validated_context(): + self.tx_storage.save_transaction(block1) + + # clone it to make sure we have a new instance + block2 = block1.clone() + + # the storage already has block1 and should correctly return True + self.assertTrue(self.tx_storage.compare_bytes_with_local_tx(block2)) + + def test_compare_bytes_partially_validated_different(self) -> None: + from hathor.transaction.validation_state import ValidationState + + # create some block, make it partially valid and save it + [block1] = add_new_blocks(self.manager, 1, advance_clock=1) + block1.set_validation(ValidationState.BASIC) + with self.tx_storage.allow_partially_validated_context(): + self.tx_storage.save_transaction(block1) + + # clone it and change something, doesn't matter what it is + # XXX: note the hash is not being updated on purpose, we expect a failure even if the hash hasn't changed + block2 = block1.clone() + block2.weight += 1 + + # the storage already has block1 and should correctly return False + self.assertFalse(self.tx_storage.compare_bytes_with_local_tx(block2)) + class 
SyncV1TransactionTest(unittest.SyncV1Params, BaseTransactionTest): __test__ = True diff --git a/tests/unittest.py b/tests/unittest.py index c6fe0b213..9eab1e9c4 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -7,7 +7,6 @@ from unittest import main as ut_main from structlog import get_logger -from twisted.internet.task import Clock from twisted.trial import unittest from hathor.builder import BuildArtifacts, Builder @@ -19,6 +18,7 @@ from hathor.transaction import BaseTransaction from hathor.util import Random, Reactor, reactor from hathor.wallet import HDWallet, Wallet +from tests.test_memory_reactor_clock import TestMemoryReactorClock logger = get_logger() main = ut_main @@ -105,8 +105,7 @@ class TestCase(unittest.TestCase): def setUp(self): _set_test_mode(TestMode.TEST_ALL_WEIGHT) self.tmpdirs = [] - # XXX: changing this clock to a MemoryReactorClock will break a lot of tests - self.clock = Clock() + self.clock = TestMemoryReactorClock() self.clock.advance(time.time()) self.log = logger.new() self.reset_peer_id_pool() @@ -174,6 +173,7 @@ def create_peer_from_builder(self, builder, start_manager=True): if start_manager: manager.start() + self.clock.run() self.run_to_completion() return manager diff --git a/tests/utils.py b/tests/utils.py index 9d5e16f77..64c578ea6 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -6,7 +6,7 @@ import time import urllib.parse from dataclasses import dataclass -from typing import Iterator, Optional, TypeVar, cast +from typing import Optional, cast import requests from hathorlib.scripts import DataScript @@ -717,48 +717,3 @@ def create_event(cls, event_id: int) -> BaseEvent: type=EventType.VERTEX_METADATA_CHANGED, data=cls.tx_data ) - - -T = TypeVar('T') - - -def zip_chunkify(flat_list: list[T], chunked_list: list[list[T]]) -> Iterator[tuple[list[T], list[T]]]: - """ - Takes two lists, one flat and one chunked. Chunks the first one into chunks of the same size as the second. 
- Returns a zipped list where each item is a tuple of chunks, one from each list. - - >>> list(zip_chunkify([], [])) - [] - >>> list(zip_chunkify([], [[]])) - [([], [])] - >>> list(zip_chunkify([], [[], []])) - [([], []), ([], [])] - >>> list(zip_chunkify([1], [[2]])) - [([1], [2])] - >>> list(zip_chunkify([1, 1], [[2]])) - Traceback (most recent call last): - ... - ValueError: lists should have the same amount of items - >>> list(zip_chunkify([1], [[2], [2]])) - Traceback (most recent call last): - ... - ValueError: lists should have the same amount of items - >>> list(zip_chunkify([1, 1], [[2], [2]])) - [([1], [2]), ([1], [2])] - >>> list(zip_chunkify([0, 2, 4, 6, 8, 10, 12], [[1], [3, 5], [7], [9, 11, 13]])) - [([0], [1]), ([2, 4], [3, 5]), ([6], [7]), ([8, 10, 12], [9, 11, 13])] - """ - - flat_list_len = len(flat_list) - chunked_list_len = sum(map(len, chunked_list)) - - if flat_list_len != chunked_list_len: - raise ValueError(f'lists should have the same amount of items. ' - f'{flat_list_len} (flat) != {chunked_list_len} (chunked)') - - flat_iter = iter(flat_list) - - for chunk in chunked_list: - items = [next(flat_iter) for _ in chunk] - - yield items, chunk